diff --git a/Lib/site-packages/Flask_Executor-1.0.0.dist-info/INSTALLER b/Lib/site-packages/Flask_Executor-1.0.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/Flask_Executor-1.0.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/Flask_Executor-1.0.0.dist-info/LICENSE b/Lib/site-packages/Flask_Executor-1.0.0.dist-info/LICENSE
new file mode 100644
index 0000000..334a087
--- /dev/null
+++ b/Lib/site-packages/Flask_Executor-1.0.0.dist-info/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2018 Dave Chevell
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/Lib/site-packages/Flask_Executor-1.0.0.dist-info/METADATA b/Lib/site-packages/Flask_Executor-1.0.0.dist-info/METADATA
new file mode 100644
index 0000000..e2f3438
--- /dev/null
+++ b/Lib/site-packages/Flask_Executor-1.0.0.dist-info/METADATA
@@ -0,0 +1,154 @@
+Metadata-Version: 2.1
+Name: Flask-Executor
+Version: 1.0.0
+Summary: An easy to use Flask wrapper for concurrent.futures
+Home-page: https://github.com/dchevell/flask-executor
+Author: Dave Chevell
+Author-email: chevell@gmail.com
+License: MIT
+Keywords: flask,concurrent.futures
+Classifier: Programming Language :: Python :: 3
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: Flask
+Requires-Dist: futures (>=3.1.1) ; python_version == "2.7"
+Provides-Extra: test
+Requires-Dist: pytest ; extra == 'test'
+Requires-Dist: flask-sqlalchemy ; extra == 'test'
+
+Flask-Executor
+==============
+
+[
+[](https://coveralls.io/github/dchevell/flask-executor)
+[](https://pypi.python.org/pypi/Flask-Executor)
+[](https://github.com/dchevell/flask-executor/blob/master/LICENSE)
+
+Sometimes you need a simple task queue without the overhead of separate worker processes or powerful-but-complex libraries beyond your requirements. Flask-Executor is an easy to use wrapper for the `concurrent.futures` module that lets you initialise and configure executors via common Flask application patterns. It's a great way to get up and running fast with a lightweight in-process task queue.
+
+Installation
+------------
+
+Flask-Executor is available on PyPI and can be installed with:
+
+ pip install flask-executor
+
+
+Quick start
+-----------
+
+Here's a quick example of using Flask-Executor inside your Flask application:
+
+```python
+from flask import Flask
+from flask_executor import Executor
+
+app = Flask(__name__)
+
+executor = Executor(app)
+
+
+def send_email(recipient, subject, body):
+ # Magic to send an email
+ return True
+
+
+@app.route('/signup')
+def signup():
+ # Do signup form
+ executor.submit(send_email, recipient, subject, body)
+```
+
+
+Contexts
+--------
+
+When calling `submit()` or `map()` Flask-Executor will wrap `ThreadPoolExecutor` callables with a
+copy of both the current application context and current request context. Code that must be run in
+these contexts or that depends on information or configuration stored in `flask.current_app`,
+`flask.request` or `flask.g` can be submitted to the executor without modification.
+
+Note: due to limitations in Python's default object serialisation and a lack of shared memory space between subprocesses, contexts cannot be pushed to `ProcessPoolExecutor()` workers.
+
+
+Futures
+-------
+
+You may want to preserve access to Futures returned from the executor, so that you can retrieve the
+results in a different part of your application. Flask-Executor allows Futures to be stored within
+the executor itself and provides methods for querying and returning them in different parts of your
+app::
+
+```python
+@app.route('/start-task')
+def start_task():
+ executor.submit_stored('calc_power', pow, 323, 1235)
+ return jsonify({'result':'success'})
+
+@app.route('/get-result')
+def get_result():
+ if not executor.futures.done('calc_power'):
+ return jsonify({'status': executor.futures._state('calc_power')})
+ future = executor.futures.pop('calc_power')
+ return jsonify({'status': done, 'result': future.result()})
+```
+
+
+Decoration
+----------
+
+Flask-Executor lets you decorate methods in the same style as distributed task queues like
+Celery:
+
+```python
+@executor.job
+def fib(n):
+ if n <= 2:
+ return 1
+ else:
+ return fib(n-1) + fib(n-2)
+
+@app.route('/decorate_fib')
+def decorate_fib():
+ fib.submit(5)
+ fib.submit_stored('fibonacci', 5)
+ fib.map(range(1, 6))
+ return 'OK'
+```
+
+
+Default Callbacks
+-----------------
+
+Future objects can have callbacks attached by using `Future.add_done_callback`. Flask-Executor
+lets you specify default callbacks that will be applied to all new futures created by the executor:
+
+```python
+def some_callback(future):
+ # do something with future
+
+executor.add_default_done_callback(some_callback)
+
+# Callback will be added to the below task automatically
+executor.submit(pow, 323, 1235)
+```
+
+
+Propagate Exceptions
+--------------------
+
+Normally any exceptions thrown by background threads or processes will be swallowed unless explicitly
+checked for. To instead surface all exceptions thrown by background tasks, Flask-Executor can add
+a special default callback that raises any exceptions thrown by tasks submitted to the executor::
+
+```python
+app.config['EXECUTOR_PROPAGATE_EXCEPTIONS'] = True
+```
+
+
+Documentation
+-------------
+
+Check out the full documentation at [flask-executor.readthedocs.io](https://flask-executor.readthedocs.io)!
diff --git a/Lib/site-packages/Flask_Executor-1.0.0.dist-info/RECORD b/Lib/site-packages/Flask_Executor-1.0.0.dist-info/RECORD
new file mode 100644
index 0000000..46a74d0
--- /dev/null
+++ b/Lib/site-packages/Flask_Executor-1.0.0.dist-info/RECORD
@@ -0,0 +1,15 @@
+Flask_Executor-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Flask_Executor-1.0.0.dist-info/LICENSE,sha256=6wCK21C3pfNZOJJUGvAjrKBKtGhvqCxop5s1USzl2EE,1069
+Flask_Executor-1.0.0.dist-info/METADATA,sha256=tq8Rvl4gigcpRZCIxUomORoL8dX1R_ng5h7wD3aT1Ak,4895
+Flask_Executor-1.0.0.dist-info/RECORD,,
+Flask_Executor-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+Flask_Executor-1.0.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+Flask_Executor-1.0.0.dist-info/top_level.txt,sha256=flo7yNzLA3XOdqCsrBcgxTWSDn6ks0C34DBDp1u-Ibo,15
+flask_executor/__init__.py,sha256=-0UZfkoQ_-_ahVe8CTRRqBuOpDnJmWdwpBYgE7lg0pQ,93
+flask_executor/__pycache__/__init__.cpython-312.pyc,,
+flask_executor/__pycache__/executor.cpython-312.pyc,,
+flask_executor/__pycache__/futures.cpython-312.pyc,,
+flask_executor/__pycache__/helpers.cpython-312.pyc,,
+flask_executor/executor.py,sha256=LZTDevzs5y1INri1S0qepZI8mx68UpDClessFQVpr7M,10898
+flask_executor/futures.py,sha256=5ZeN3DHkzqHpIW7JIli7rJhu2b0nVpucKgULSk5GqV4,4101
+flask_executor/helpers.py,sha256=mgEiYBpmxuLqV8jrtdDS4c0FiMQhYGsrwZYAZdKJcyg,1185
diff --git a/Lib/site-packages/Flask_Executor-1.0.0.dist-info/REQUESTED b/Lib/site-packages/Flask_Executor-1.0.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/Flask_Executor-1.0.0.dist-info/WHEEL b/Lib/site-packages/Flask_Executor-1.0.0.dist-info/WHEEL
new file mode 100644
index 0000000..becc9a6
--- /dev/null
+++ b/Lib/site-packages/Flask_Executor-1.0.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/Lib/site-packages/Flask_Executor-1.0.0.dist-info/top_level.txt b/Lib/site-packages/Flask_Executor-1.0.0.dist-info/top_level.txt
new file mode 100644
index 0000000..81f38a7
--- /dev/null
+++ b/Lib/site-packages/Flask_Executor-1.0.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+flask_executor
diff --git a/Lib/site-packages/Jinja2-3.1.3.dist-info/INSTALLER b/Lib/site-packages/Jinja2-3.1.3.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/Jinja2-3.1.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/Jinja2-3.1.3.dist-info/LICENSE.rst b/Lib/site-packages/Jinja2-3.1.3.dist-info/LICENSE.rst
new file mode 100644
index 0000000..c37cae4
--- /dev/null
+++ b/Lib/site-packages/Jinja2-3.1.3.dist-info/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2007 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/Lib/site-packages/Jinja2-3.1.3.dist-info/METADATA b/Lib/site-packages/Jinja2-3.1.3.dist-info/METADATA
new file mode 100644
index 0000000..56e9429
--- /dev/null
+++ b/Lib/site-packages/Jinja2-3.1.3.dist-info/METADATA
@@ -0,0 +1,105 @@
+Metadata-Version: 2.1
+Name: Jinja2
+Version: 3.1.3
+Summary: A very fast and expressive template engine.
+Home-page: https://palletsprojects.com/p/jinja/
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://jinja.palletsprojects.com/
+Project-URL: Changes, https://jinja.palletsprojects.com/changes/
+Project-URL: Source Code, https://github.com/pallets/jinja/
+Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/
+Project-URL: Chat, https://discord.gg/pallets
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+Requires-Dist: MarkupSafe >=2.0
+Provides-Extra: i18n
+Requires-Dist: Babel >=2.7 ; extra == 'i18n'
+
+Jinja
+=====
+
+Jinja is a fast, expressive, extensible templating engine. Special
+placeholders in the template allow writing code similar to Python
+syntax. Then the template is passed data to render the final document.
+
+It includes:
+
+- Template inheritance and inclusion.
+- Define and import macros within templates.
+- HTML templates can use autoescaping to prevent XSS from untrusted
+ user input.
+- A sandboxed environment can safely render untrusted templates.
+- AsyncIO support for generating templates and calling async
+ functions.
+- I18N support with Babel.
+- Templates are compiled to optimized Python code just-in-time and
+ cached, or can be compiled ahead-of-time.
+- Exceptions point to the correct line in templates to make debugging
+ easier.
+- Extensible filters, tests, functions, and even syntax.
+
+Jinja's philosophy is that while application logic belongs in Python if
+possible, it shouldn't make the template designer's job difficult by
+restricting functionality too much.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+ $ pip install -U Jinja2
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+In A Nutshell
+-------------
+
+.. code-block:: jinja
+
+ {% extends "base.html" %}
+ {% block title %}Members{% endblock %}
+ {% block content %}
+
+ {% endblock %}
+
+
+Donate
+------
+
+The Pallets organization develops and supports Jinja and other popular
+packages. In order to grow the community of contributors and users, and
+allow the maintainers to devote more time to the projects, `please
+donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+- Documentation: https://jinja.palletsprojects.com/
+- Changes: https://jinja.palletsprojects.com/changes/
+- PyPI Releases: https://pypi.org/project/Jinja2/
+- Source Code: https://github.com/pallets/jinja/
+- Issue Tracker: https://github.com/pallets/jinja/issues/
+- Chat: https://discord.gg/pallets
diff --git a/Lib/site-packages/Jinja2-3.1.3.dist-info/RECORD b/Lib/site-packages/Jinja2-3.1.3.dist-info/RECORD
new file mode 100644
index 0000000..70419d6
--- /dev/null
+++ b/Lib/site-packages/Jinja2-3.1.3.dist-info/RECORD
@@ -0,0 +1,58 @@
+Jinja2-3.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Jinja2-3.1.3.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
+Jinja2-3.1.3.dist-info/METADATA,sha256=0cLNbRCI91jytc7Bzv3XAQfZzFDF2gxkJuH46eF5vew,3301
+Jinja2-3.1.3.dist-info/RECORD,,
+Jinja2-3.1.3.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+Jinja2-3.1.3.dist-info/entry_points.txt,sha256=zRd62fbqIyfUpsRtU7EVIFyiu1tPwfgO7EvPErnxgTE,59
+Jinja2-3.1.3.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7
+jinja2/__init__.py,sha256=NTBwMwsECrdHmxeXF7seusHLzrh6Ldn1A9qhS5cDuf0,1927
+jinja2/__pycache__/__init__.cpython-312.pyc,,
+jinja2/__pycache__/_identifier.cpython-312.pyc,,
+jinja2/__pycache__/async_utils.cpython-312.pyc,,
+jinja2/__pycache__/bccache.cpython-312.pyc,,
+jinja2/__pycache__/compiler.cpython-312.pyc,,
+jinja2/__pycache__/constants.cpython-312.pyc,,
+jinja2/__pycache__/debug.cpython-312.pyc,,
+jinja2/__pycache__/defaults.cpython-312.pyc,,
+jinja2/__pycache__/environment.cpython-312.pyc,,
+jinja2/__pycache__/exceptions.cpython-312.pyc,,
+jinja2/__pycache__/ext.cpython-312.pyc,,
+jinja2/__pycache__/filters.cpython-312.pyc,,
+jinja2/__pycache__/idtracking.cpython-312.pyc,,
+jinja2/__pycache__/lexer.cpython-312.pyc,,
+jinja2/__pycache__/loaders.cpython-312.pyc,,
+jinja2/__pycache__/meta.cpython-312.pyc,,
+jinja2/__pycache__/nativetypes.cpython-312.pyc,,
+jinja2/__pycache__/nodes.cpython-312.pyc,,
+jinja2/__pycache__/optimizer.cpython-312.pyc,,
+jinja2/__pycache__/parser.cpython-312.pyc,,
+jinja2/__pycache__/runtime.cpython-312.pyc,,
+jinja2/__pycache__/sandbox.cpython-312.pyc,,
+jinja2/__pycache__/tests.cpython-312.pyc,,
+jinja2/__pycache__/utils.cpython-312.pyc,,
+jinja2/__pycache__/visitor.cpython-312.pyc,,
+jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958
+jinja2/async_utils.py,sha256=dFcmh6lMNfbh7eLKrBio8JqAKLHdZbpCuurFN4OERtY,2447
+jinja2/bccache.py,sha256=mhz5xtLxCcHRAa56azOhphIAe19u1we0ojifNMClDio,14061
+jinja2/compiler.py,sha256=PJzYdRLStlEOqmnQs1YxlizPrJoj3jTZuUleREn6AIQ,72199
+jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433
+jinja2/debug.py,sha256=iWJ432RadxJNnaMOPrjIDInz50UEgni3_HKuFXi2vuQ,6299
+jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267
+jinja2/environment.py,sha256=0qldX3VQKZcm6lgn7zHz94oRFow7YPYERiqkquomNjU,61253
+jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071
+jinja2/ext.py,sha256=5fnMpllaXkfm2P_93RIvi-OnK7Tk8mCW8Du-GcD12Hc,31844
+jinja2/filters.py,sha256=vYjKb2zaPShvYtn_LpSmqfS8SScbrA_KOanNibsMDIE,53862
+jinja2/idtracking.py,sha256=GfNmadir4oDALVxzn3DL9YInhJDr69ebXeA2ygfuCGA,10704
+jinja2/lexer.py,sha256=DW2nX9zk-6MWp65YR2bqqj0xqCvLtD-u9NWT8AnFRxQ,29726
+jinja2/loaders.py,sha256=ayAwxfrA1SAffQta0nwSDm3TDT4KYiIGN_D9Z45B310,23085
+jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396
+jinja2/nativetypes.py,sha256=7GIGALVJgdyL80oZJdQUaUfwSt5q2lSSZbXt0dNf_M4,4210
+jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550
+jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650
+jinja2/parser.py,sha256=Y199wPL-G67gJoi5G_5sHuu9uEP1PJkjjLEW_xTH8-k,39736
+jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jinja2/runtime.py,sha256=_6LkKIWFJjQdqlrgA3K39zBFQ-7Orm3wGDm96RwxQoE,33406
+jinja2/sandbox.py,sha256=Y0xZeXQnH6EX5VjaV2YixESxoepnRbW_3UeQosaBU3M,14584
+jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905
+jinja2/utils.py,sha256=IMwRIcN1SsTw2-jdQtlH2KzNABsXZBW_-tnFXafQBvY,23933
+jinja2/visitor.py,sha256=MH14C6yq24G_KVtWzjwaI7Wg14PCJIYlWW1kpkxYak0,3568
diff --git a/Lib/site-packages/Jinja2-3.1.3.dist-info/WHEEL b/Lib/site-packages/Jinja2-3.1.3.dist-info/WHEEL
new file mode 100644
index 0000000..98c0d20
--- /dev/null
+++ b/Lib/site-packages/Jinja2-3.1.3.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.42.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/Lib/site-packages/Jinja2-3.1.3.dist-info/entry_points.txt b/Lib/site-packages/Jinja2-3.1.3.dist-info/entry_points.txt
new file mode 100644
index 0000000..7b9666c
--- /dev/null
+++ b/Lib/site-packages/Jinja2-3.1.3.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[babel.extractors]
+jinja2 = jinja2.ext:babel_extract[i18n]
diff --git a/Lib/site-packages/Jinja2-3.1.3.dist-info/top_level.txt b/Lib/site-packages/Jinja2-3.1.3.dist-info/top_level.txt
new file mode 100644
index 0000000..7f7afbf
--- /dev/null
+++ b/Lib/site-packages/Jinja2-3.1.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+jinja2
diff --git a/Lib/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER b/Lib/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst b/Lib/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst
new file mode 100644
index 0000000..9d227a0
--- /dev/null
+++ b/Lib/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2010 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/Lib/site-packages/MarkupSafe-2.1.5.dist-info/METADATA b/Lib/site-packages/MarkupSafe-2.1.5.dist-info/METADATA
new file mode 100644
index 0000000..dfe37d5
--- /dev/null
+++ b/Lib/site-packages/MarkupSafe-2.1.5.dist-info/METADATA
@@ -0,0 +1,93 @@
+Metadata-Version: 2.1
+Name: MarkupSafe
+Version: 2.1.5
+Summary: Safely add untrusted strings to HTML/XML markup.
+Home-page: https://palletsprojects.com/p/markupsafe/
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://markupsafe.palletsprojects.com/
+Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
+Project-URL: Source Code, https://github.com/pallets/markupsafe/
+Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/
+Project-URL: Chat, https://discord.gg/pallets
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+
+MarkupSafe
+==========
+
+MarkupSafe implements a text object that escapes characters so it is
+safe to use in HTML and XML. Characters that have special meanings are
+replaced so that they display as the actual characters. This mitigates
+injection attacks, meaning untrusted user input can safely be displayed
+on a page.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+ pip install -U MarkupSafe
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+Examples
+--------
+
+.. code-block:: pycon
+
+ >>> from markupsafe import Markup, escape
+
+ >>> # escape replaces special characters and wraps in Markup
+ >>> escape("")
+ Markup('<script>alert(document.cookie);</script>')
+
+ >>> # wrap in Markup to mark text "safe" and prevent escaping
+ >>> Markup("Hello")
+ Markup('hello')
+
+ >>> escape(Markup("Hello"))
+ Markup('hello')
+
+ >>> # Markup is a str subclass
+ >>> # methods and operators escape their arguments
+ >>> template = Markup("Hello {name}")
+ >>> template.format(name='"World"')
+ Markup('Hello "World"')
+
+
+Donate
+------
+
+The Pallets organization develops and supports MarkupSafe and other
+popular packages. In order to grow the community of contributors and
+users, and allow the maintainers to devote more time to the projects,
+`please donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+- Documentation: https://markupsafe.palletsprojects.com/
+- Changes: https://markupsafe.palletsprojects.com/changes/
+- PyPI Releases: https://pypi.org/project/MarkupSafe/
+- Source Code: https://github.com/pallets/markupsafe/
+- Issue Tracker: https://github.com/pallets/markupsafe/issues/
+- Chat: https://discord.gg/pallets
diff --git a/Lib/site-packages/MarkupSafe-2.1.5.dist-info/RECORD b/Lib/site-packages/MarkupSafe-2.1.5.dist-info/RECORD
new file mode 100644
index 0000000..1b43b96
--- /dev/null
+++ b/Lib/site-packages/MarkupSafe-2.1.5.dist-info/RECORD
@@ -0,0 +1,14 @@
+MarkupSafe-2.1.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+MarkupSafe-2.1.5.dist-info/LICENSE.rst,sha256=RjHsDbX9kKVH4zaBcmTGeYIUM4FG-KyUtKV_lu6MnsQ,1503
+MarkupSafe-2.1.5.dist-info/METADATA,sha256=icNlaniV7YIQZ1BScCVqNaRtm7MAgfw8d3OBmoSVyAY,3096
+MarkupSafe-2.1.5.dist-info/RECORD,,
+MarkupSafe-2.1.5.dist-info/WHEEL,sha256=j9Aissza3750LQHFAQyYerNjmkEON1-8w_RaZNFtKSs,102
+MarkupSafe-2.1.5.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
+markupsafe/__init__.py,sha256=m1ysNeqf55zbEoJtaovca40ivrkEFolPlw5bGoC5Gi4,11290
+markupsafe/__pycache__/__init__.cpython-312.pyc,,
+markupsafe/__pycache__/_native.cpython-312.pyc,,
+markupsafe/_native.py,sha256=_Q7UsXCOvgdonCgqG3l5asANI6eo50EKnDM-mlwEC5M,1776
+markupsafe/_speedups.c,sha256=n3jzzaJwXcoN8nTFyA53f3vSqsWK2vujI-v6QYifjhQ,7403
+markupsafe/_speedups.cp312-win_amd64.pyd,sha256=CLz8k0mpvM-dgLP0eSHpGYHm8shlGxXoCinA12zgHsY,15872
+markupsafe/_speedups.pyi,sha256=f5QtwIOP0eLrxh2v5p6SmaYmlcHIGIfmz0DovaqL0OU,238
+markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/Lib/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL b/Lib/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL
new file mode 100644
index 0000000..1c1a93d
--- /dev/null
+++ b/Lib/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.42.0)
+Root-Is-Purelib: false
+Tag: cp312-cp312-win_amd64
+
diff --git a/Lib/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt b/Lib/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt
new file mode 100644
index 0000000..75bf729
--- /dev/null
+++ b/Lib/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt
@@ -0,0 +1 @@
+markupsafe
diff --git a/Lib/site-packages/PIL/BdfFontFile.py b/Lib/site-packages/PIL/BdfFontFile.py
new file mode 100644
index 0000000..e3eda4f
--- /dev/null
+++ b/Lib/site-packages/PIL/BdfFontFile.py
@@ -0,0 +1,133 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# bitmap distribution font (bdf) file parser
+#
+# history:
+# 1996-05-16 fl created (as bdf2pil)
+# 1997-08-25 fl converted to FontFile driver
+# 2001-05-25 fl removed bogus __init__ call
+# 2002-11-20 fl robustification (from Kevin Cazabon, Dmitry Vasiliev)
+# 2003-04-22 fl more robustification (from Graham Dumpleton)
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1997-2003 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+"""
+Parse X Bitmap Distribution Format (BDF)
+"""
+from __future__ import annotations
+
+from typing import BinaryIO
+
+from . import FontFile, Image
+
+bdf_slant = {
+ "R": "Roman",
+ "I": "Italic",
+ "O": "Oblique",
+ "RI": "Reverse Italic",
+ "RO": "Reverse Oblique",
+ "OT": "Other",
+}
+
+bdf_spacing = {"P": "Proportional", "M": "Monospaced", "C": "Cell"}
+
+
+def bdf_char(
+ f: BinaryIO,
+) -> (
+ tuple[
+ str,
+ int,
+ tuple[tuple[int, int], tuple[int, int, int, int], tuple[int, int, int, int]],
+ Image.Image,
+ ]
+ | None
+):
+ # skip to STARTCHAR
+ while True:
+ s = f.readline()
+ if not s:
+ return None
+ if s[:9] == b"STARTCHAR":
+ break
+ id = s[9:].strip().decode("ascii")
+
+ # load symbol properties
+ props = {}
+ while True:
+ s = f.readline()
+ if not s or s[:6] == b"BITMAP":
+ break
+ i = s.find(b" ")
+ props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii")
+
+ # load bitmap
+ bitmap = bytearray()
+ while True:
+ s = f.readline()
+ if not s or s[:7] == b"ENDCHAR":
+ break
+ bitmap += s[:-1]
+
+ # The word BBX
+ # followed by the width in x (BBw), height in y (BBh),
+ # and x and y displacement (BBxoff0, BByoff0)
+ # of the lower left corner from the origin of the character.
+ width, height, x_disp, y_disp = (int(p) for p in props["BBX"].split())
+
+ # The word DWIDTH
+ # followed by the width in x and y of the character in device pixels.
+ dwx, dwy = (int(p) for p in props["DWIDTH"].split())
+
+ bbox = (
+ (dwx, dwy),
+ (x_disp, -y_disp - height, width + x_disp, -y_disp),
+ (0, 0, width, height),
+ )
+
+ try:
+ im = Image.frombytes("1", (width, height), bitmap, "hex", "1")
+ except ValueError:
+ # deal with zero-width characters
+ im = Image.new("1", (width, height))
+
+ return id, int(props["ENCODING"]), bbox, im
+
+
+class BdfFontFile(FontFile.FontFile):
+ """Font file plugin for the X11 BDF format."""
+
+ def __init__(self, fp: BinaryIO):
+ super().__init__()
+
+ s = fp.readline()
+ if s[:13] != b"STARTFONT 2.1":
+ msg = "not a valid BDF file"
+ raise SyntaxError(msg)
+
+ props = {}
+ comments = []
+
+ while True:
+ s = fp.readline()
+ if not s or s[:13] == b"ENDPROPERTIES":
+ break
+ i = s.find(b" ")
+ props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii")
+ if s[:i] in [b"COMMENT", b"COPYRIGHT"]:
+ if s.find(b"LogicalFontDescription") < 0:
+ comments.append(s[i + 1 : -1].decode("ascii"))
+
+ while True:
+ c = bdf_char(fp)
+ if not c:
+ break
+ id, ch, (xy, dst, src), im = c
+ if 0 <= ch < len(self.glyph):
+ self.glyph[ch] = xy, dst, src, im
diff --git a/Lib/site-packages/PIL/BlpImagePlugin.py b/Lib/site-packages/PIL/BlpImagePlugin.py
new file mode 100644
index 0000000..b8f38b7
--- /dev/null
+++ b/Lib/site-packages/PIL/BlpImagePlugin.py
@@ -0,0 +1,475 @@
+"""
+Blizzard Mipmap Format (.blp)
+Jerome Leclanche
+
+The contents of this file are hereby released in the public domain (CC0)
+Full text of the CC0 license:
+ https://creativecommons.org/publicdomain/zero/1.0/
+
+BLP1 files, used mostly in Warcraft III, are not fully supported.
+All types of BLP2 files used in World of Warcraft are supported.
+
+The BLP file structure consists of a header, up to 16 mipmaps of the
+texture
+
+Texture sizes must be powers of two, though the two dimensions do
+not have to be equal; 512x256 is valid, but 512x200 is not.
+The first mipmap (mipmap #0) is the full size image; each subsequent
+mipmap halves both dimensions. The final mipmap should be 1x1.
+
+BLP files come in many different flavours:
+* JPEG-compressed (type == 0) - only supported for BLP1.
+* RAW images (type == 1, encoding == 1). Each mipmap is stored as an
+ array of 8-bit values, one per pixel, left to right, top to bottom.
+ Each value is an index to the palette.
+* DXT-compressed (type == 1, encoding == 2):
+- DXT1 compression is used if alpha_encoding == 0.
+ - An additional alpha bit is used if alpha_depth == 1.
+ - DXT3 compression is used if alpha_encoding == 1.
+ - DXT5 compression is used if alpha_encoding == 7.
+"""
+from __future__ import annotations
+
+import os
+import struct
+from enum import IntEnum
+from io import BytesIO
+
+from . import Image, ImageFile
+
+
+class Format(IntEnum):
+ JPEG = 0
+
+
+class Encoding(IntEnum):
+ UNCOMPRESSED = 1
+ DXT = 2
+ UNCOMPRESSED_RAW_BGRA = 3
+
+
+class AlphaEncoding(IntEnum):
+ DXT1 = 0
+ DXT3 = 1
+ DXT5 = 7
+
+
+def unpack_565(i):
+ return ((i >> 11) & 0x1F) << 3, ((i >> 5) & 0x3F) << 2, (i & 0x1F) << 3
+
+
+def decode_dxt1(data, alpha=False):
+ """
+ input: one "row" of data (i.e. will produce 4*width pixels)
+ """
+
+ blocks = len(data) // 8 # number of blocks in row
+ ret = (bytearray(), bytearray(), bytearray(), bytearray())
+
+ for block in range(blocks):
+ # Decode next 8-byte block.
+ idx = block * 8
+ color0, color1, bits = struct.unpack_from("> 2
+
+ a = 0xFF
+ if control == 0:
+ r, g, b = r0, g0, b0
+ elif control == 1:
+ r, g, b = r1, g1, b1
+ elif control == 2:
+ if color0 > color1:
+ r = (2 * r0 + r1) // 3
+ g = (2 * g0 + g1) // 3
+ b = (2 * b0 + b1) // 3
+ else:
+ r = (r0 + r1) // 2
+ g = (g0 + g1) // 2
+ b = (b0 + b1) // 2
+ elif control == 3:
+ if color0 > color1:
+ r = (2 * r1 + r0) // 3
+ g = (2 * g1 + g0) // 3
+ b = (2 * b1 + b0) // 3
+ else:
+ r, g, b, a = 0, 0, 0, 0
+
+ if alpha:
+ ret[j].extend([r, g, b, a])
+ else:
+ ret[j].extend([r, g, b])
+
+ return ret
+
+
+def decode_dxt3(data):
+ """
+ input: one "row" of data (i.e. will produce 4*width pixels)
+ """
+
+ blocks = len(data) // 16 # number of blocks in row
+ ret = (bytearray(), bytearray(), bytearray(), bytearray())
+
+ for block in range(blocks):
+ idx = block * 16
+ block = data[idx : idx + 16]
+ # Decode next 16-byte block.
+ bits = struct.unpack_from("<8B", block)
+ color0, color1 = struct.unpack_from(">= 4
+ else:
+ high = True
+ a &= 0xF
+ a *= 17 # We get a value between 0 and 15
+
+ color_code = (code >> 2 * (4 * j + i)) & 0x03
+
+ if color_code == 0:
+ r, g, b = r0, g0, b0
+ elif color_code == 1:
+ r, g, b = r1, g1, b1
+ elif color_code == 2:
+ r = (2 * r0 + r1) // 3
+ g = (2 * g0 + g1) // 3
+ b = (2 * b0 + b1) // 3
+ elif color_code == 3:
+ r = (2 * r1 + r0) // 3
+ g = (2 * g1 + g0) // 3
+ b = (2 * b1 + b0) // 3
+
+ ret[j].extend([r, g, b, a])
+
+ return ret
+
+
+def decode_dxt5(data):
+ """
+ input: one "row" of data (i.e. will produce 4 * width pixels)
+ """
+
+ blocks = len(data) // 16 # number of blocks in row
+ ret = (bytearray(), bytearray(), bytearray(), bytearray())
+
+ for block in range(blocks):
+ idx = block * 16
+ block = data[idx : idx + 16]
+ # Decode next 16-byte block.
+ a0, a1 = struct.unpack_from("> alphacode_index) & 0x07
+ elif alphacode_index == 15:
+ alphacode = (alphacode2 >> 15) | ((alphacode1 << 1) & 0x06)
+ else: # alphacode_index >= 18 and alphacode_index <= 45
+ alphacode = (alphacode1 >> (alphacode_index - 16)) & 0x07
+
+ if alphacode == 0:
+ a = a0
+ elif alphacode == 1:
+ a = a1
+ elif a0 > a1:
+ a = ((8 - alphacode) * a0 + (alphacode - 1) * a1) // 7
+ elif alphacode == 6:
+ a = 0
+ elif alphacode == 7:
+ a = 255
+ else:
+ a = ((6 - alphacode) * a0 + (alphacode - 1) * a1) // 5
+
+ color_code = (code >> 2 * (4 * j + i)) & 0x03
+
+ if color_code == 0:
+ r, g, b = r0, g0, b0
+ elif color_code == 1:
+ r, g, b = r1, g1, b1
+ elif color_code == 2:
+ r = (2 * r0 + r1) // 3
+ g = (2 * g0 + g1) // 3
+ b = (2 * b0 + b1) // 3
+ elif color_code == 3:
+ r = (2 * r1 + r0) // 3
+ g = (2 * g1 + g0) // 3
+ b = (2 * b1 + b0) // 3
+
+ ret[j].extend([r, g, b, a])
+
+ return ret
+
+
+class BLPFormatError(NotImplementedError):
+ pass
+
+
+def _accept(prefix):
+ return prefix[:4] in (b"BLP1", b"BLP2")
+
+
+class BlpImageFile(ImageFile.ImageFile):
+ """
+ Blizzard Mipmap Format
+ """
+
+ format = "BLP"
+ format_description = "Blizzard Mipmap Format"
+
+ def _open(self):
+ self.magic = self.fp.read(4)
+
+ self.fp.seek(5, os.SEEK_CUR)
+ (self._blp_alpha_depth,) = struct.unpack(" mode, rawmode
+ 1: ("P", "P;1"),
+ 4: ("P", "P;4"),
+ 8: ("P", "P"),
+ 16: ("RGB", "BGR;15"),
+ 24: ("RGB", "BGR"),
+ 32: ("RGB", "BGRX"),
+}
+
+
+def _accept(prefix):
+ return prefix[:2] == b"BM"
+
+
+def _dib_accept(prefix):
+ return i32(prefix) in [12, 40, 64, 108, 124]
+
+
+# =============================================================================
+# Image plugin for the Windows BMP format.
+# =============================================================================
+class BmpImageFile(ImageFile.ImageFile):
+ """Image plugin for the Windows Bitmap format (BMP)"""
+
+ # ------------------------------------------------------------- Description
+ format_description = "Windows Bitmap"
+ format = "BMP"
+
+ # -------------------------------------------------- BMP Compression values
+ COMPRESSIONS = {"RAW": 0, "RLE8": 1, "RLE4": 2, "BITFIELDS": 3, "JPEG": 4, "PNG": 5}
+ for k, v in COMPRESSIONS.items():
+ vars()[k] = v
+
+ def _bitmap(self, header=0, offset=0):
+ """Read relevant info about the BMP"""
+ read, seek = self.fp.read, self.fp.seek
+ if header:
+ seek(header)
+ # read bmp header size @offset 14 (this is part of the header size)
+ file_info = {"header_size": i32(read(4)), "direction": -1}
+
+ # -------------------- If requested, read header at a specific position
+ # read the rest of the bmp header, without its size
+ header_data = ImageFile._safe_read(self.fp, file_info["header_size"] - 4)
+
+ # -------------------------------------------------- IBM OS/2 Bitmap v1
+ # ----- This format has different offsets because of width/height types
+ if file_info["header_size"] == 12:
+ file_info["width"] = i16(header_data, 0)
+ file_info["height"] = i16(header_data, 2)
+ file_info["planes"] = i16(header_data, 4)
+ file_info["bits"] = i16(header_data, 6)
+ file_info["compression"] = self.RAW
+ file_info["palette_padding"] = 3
+
+ # --------------------------------------------- Windows Bitmap v2 to v5
+ # v3, OS/2 v2, v4, v5
+ elif file_info["header_size"] in (40, 64, 108, 124):
+ file_info["y_flip"] = header_data[7] == 0xFF
+ file_info["direction"] = 1 if file_info["y_flip"] else -1
+ file_info["width"] = i32(header_data, 0)
+ file_info["height"] = (
+ i32(header_data, 4)
+ if not file_info["y_flip"]
+ else 2**32 - i32(header_data, 4)
+ )
+ file_info["planes"] = i16(header_data, 8)
+ file_info["bits"] = i16(header_data, 10)
+ file_info["compression"] = i32(header_data, 12)
+ # byte size of pixel data
+ file_info["data_size"] = i32(header_data, 16)
+ file_info["pixels_per_meter"] = (
+ i32(header_data, 20),
+ i32(header_data, 24),
+ )
+ file_info["colors"] = i32(header_data, 28)
+ file_info["palette_padding"] = 4
+ self.info["dpi"] = tuple(x / 39.3701 for x in file_info["pixels_per_meter"])
+ if file_info["compression"] == self.BITFIELDS:
+ if len(header_data) >= 52:
+ for idx, mask in enumerate(
+ ["r_mask", "g_mask", "b_mask", "a_mask"]
+ ):
+ file_info[mask] = i32(header_data, 36 + idx * 4)
+ else:
+ # 40 byte headers only have the three components in the
+ # bitfields masks, ref:
+ # https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx
+ # See also
+ # https://github.com/python-pillow/Pillow/issues/1293
+ # There is a 4th component in the RGBQuad, in the alpha
+ # location, but it is listed as a reserved component,
+ # and it is not generally an alpha channel
+ file_info["a_mask"] = 0x0
+ for mask in ["r_mask", "g_mask", "b_mask"]:
+ file_info[mask] = i32(read(4))
+ file_info["rgb_mask"] = (
+ file_info["r_mask"],
+ file_info["g_mask"],
+ file_info["b_mask"],
+ )
+ file_info["rgba_mask"] = (
+ file_info["r_mask"],
+ file_info["g_mask"],
+ file_info["b_mask"],
+ file_info["a_mask"],
+ )
+ else:
+ msg = f"Unsupported BMP header type ({file_info['header_size']})"
+ raise OSError(msg)
+
+ # ------------------ Special case : header is reported 40, which
+ # ---------------------- is shorter than real size for bpp >= 16
+ self._size = file_info["width"], file_info["height"]
+
+ # ------- If color count was not found in the header, compute from bits
+ file_info["colors"] = (
+ file_info["colors"]
+ if file_info.get("colors", 0)
+ else (1 << file_info["bits"])
+ )
+ if offset == 14 + file_info["header_size"] and file_info["bits"] <= 8:
+ offset += 4 * file_info["colors"]
+
+ # ---------------------- Check bit depth for unusual unsupported values
+ self._mode, raw_mode = BIT2MODE.get(file_info["bits"], (None, None))
+ if self.mode is None:
+ msg = f"Unsupported BMP pixel depth ({file_info['bits']})"
+ raise OSError(msg)
+
+ # ---------------- Process BMP with Bitfields compression (not palette)
+ decoder_name = "raw"
+ if file_info["compression"] == self.BITFIELDS:
+ SUPPORTED = {
+ 32: [
+ (0xFF0000, 0xFF00, 0xFF, 0x0),
+ (0xFF000000, 0xFF0000, 0xFF00, 0x0),
+ (0xFF000000, 0xFF0000, 0xFF00, 0xFF),
+ (0xFF, 0xFF00, 0xFF0000, 0xFF000000),
+ (0xFF0000, 0xFF00, 0xFF, 0xFF000000),
+ (0x0, 0x0, 0x0, 0x0),
+ ],
+ 24: [(0xFF0000, 0xFF00, 0xFF)],
+ 16: [(0xF800, 0x7E0, 0x1F), (0x7C00, 0x3E0, 0x1F)],
+ }
+ MASK_MODES = {
+ (32, (0xFF0000, 0xFF00, 0xFF, 0x0)): "BGRX",
+ (32, (0xFF000000, 0xFF0000, 0xFF00, 0x0)): "XBGR",
+ (32, (0xFF000000, 0xFF0000, 0xFF00, 0xFF)): "ABGR",
+ (32, (0xFF, 0xFF00, 0xFF0000, 0xFF000000)): "RGBA",
+ (32, (0xFF0000, 0xFF00, 0xFF, 0xFF000000)): "BGRA",
+ (32, (0x0, 0x0, 0x0, 0x0)): "BGRA",
+ (24, (0xFF0000, 0xFF00, 0xFF)): "BGR",
+ (16, (0xF800, 0x7E0, 0x1F)): "BGR;16",
+ (16, (0x7C00, 0x3E0, 0x1F)): "BGR;15",
+ }
+ if file_info["bits"] in SUPPORTED:
+ if (
+ file_info["bits"] == 32
+ and file_info["rgba_mask"] in SUPPORTED[file_info["bits"]]
+ ):
+ raw_mode = MASK_MODES[(file_info["bits"], file_info["rgba_mask"])]
+ self._mode = "RGBA" if "A" in raw_mode else self.mode
+ elif (
+ file_info["bits"] in (24, 16)
+ and file_info["rgb_mask"] in SUPPORTED[file_info["bits"]]
+ ):
+ raw_mode = MASK_MODES[(file_info["bits"], file_info["rgb_mask"])]
+ else:
+ msg = "Unsupported BMP bitfields layout"
+ raise OSError(msg)
+ else:
+ msg = "Unsupported BMP bitfields layout"
+ raise OSError(msg)
+ elif file_info["compression"] == self.RAW:
+ if file_info["bits"] == 32 and header == 22: # 32-bit .cur offset
+ raw_mode, self._mode = "BGRA", "RGBA"
+ elif file_info["compression"] in (self.RLE8, self.RLE4):
+ decoder_name = "bmp_rle"
+ else:
+ msg = f"Unsupported BMP compression ({file_info['compression']})"
+ raise OSError(msg)
+
+ # --------------- Once the header is processed, process the palette/LUT
+ if self.mode == "P": # Paletted for 1, 4 and 8 bit images
+ # ---------------------------------------------------- 1-bit images
+ if not (0 < file_info["colors"] <= 65536):
+ msg = f"Unsupported BMP Palette size ({file_info['colors']})"
+ raise OSError(msg)
+ else:
+ padding = file_info["palette_padding"]
+ palette = read(padding * file_info["colors"])
+ grayscale = True
+ indices = (
+ (0, 255)
+ if file_info["colors"] == 2
+ else list(range(file_info["colors"]))
+ )
+
+ # ----------------- Check if grayscale and ignore palette if so
+ for ind, val in enumerate(indices):
+ rgb = palette[ind * padding : ind * padding + 3]
+ if rgb != o8(val) * 3:
+ grayscale = False
+
+ # ------- If all colors are gray, white or black, ditch palette
+ if grayscale:
+ self._mode = "1" if file_info["colors"] == 2 else "L"
+ raw_mode = self.mode
+ else:
+ self._mode = "P"
+ self.palette = ImagePalette.raw(
+ "BGRX" if padding == 4 else "BGR", palette
+ )
+
+ # ---------------------------- Finally set the tile data for the plugin
+ self.info["compression"] = file_info["compression"]
+ args = [raw_mode]
+ if decoder_name == "bmp_rle":
+ args.append(file_info["compression"] == self.RLE4)
+ else:
+ args.append(((file_info["width"] * file_info["bits"] + 31) >> 3) & (~3))
+ args.append(file_info["direction"])
+ self.tile = [
+ (
+ decoder_name,
+ (0, 0, file_info["width"], file_info["height"]),
+ offset or self.fp.tell(),
+ tuple(args),
+ )
+ ]
+
+ def _open(self):
+ """Open file, check magic number and read header"""
+ # read 14 bytes: magic number, filesize, reserved, header final offset
+ head_data = self.fp.read(14)
+ # choke if the file does not have the required magic bytes
+ if not _accept(head_data):
+ msg = "Not a BMP file"
+ raise SyntaxError(msg)
+ # read the start position of the BMP image data (u32)
+ offset = i32(head_data, 10)
+ # load bitmap information (offset=raster info)
+ self._bitmap(offset=offset)
+
+
+class BmpRleDecoder(ImageFile.PyDecoder):
+ _pulls_fd = True
+
+ def decode(self, buffer):
+ rle4 = self.args[1]
+ data = bytearray()
+ x = 0
+ while len(data) < self.state.xsize * self.state.ysize:
+ pixels = self.fd.read(1)
+ byte = self.fd.read(1)
+ if not pixels or not byte:
+ break
+ num_pixels = pixels[0]
+ if num_pixels:
+ # encoded mode
+ if x + num_pixels > self.state.xsize:
+ # Too much data for row
+ num_pixels = max(0, self.state.xsize - x)
+ if rle4:
+ first_pixel = o8(byte[0] >> 4)
+ second_pixel = o8(byte[0] & 0x0F)
+ for index in range(num_pixels):
+ if index % 2 == 0:
+ data += first_pixel
+ else:
+ data += second_pixel
+ else:
+ data += byte * num_pixels
+ x += num_pixels
+ else:
+ if byte[0] == 0:
+ # end of line
+ while len(data) % self.state.xsize != 0:
+ data += b"\x00"
+ x = 0
+ elif byte[0] == 1:
+ # end of bitmap
+ break
+ elif byte[0] == 2:
+ # delta
+ bytes_read = self.fd.read(2)
+ if len(bytes_read) < 2:
+ break
+ right, up = self.fd.read(2)
+ data += b"\x00" * (right + up * self.state.xsize)
+ x = len(data) % self.state.xsize
+ else:
+ # absolute mode
+ if rle4:
+ # 2 pixels per byte
+ byte_count = byte[0] // 2
+ bytes_read = self.fd.read(byte_count)
+ for byte_read in bytes_read:
+ data += o8(byte_read >> 4)
+ data += o8(byte_read & 0x0F)
+ else:
+ byte_count = byte[0]
+ bytes_read = self.fd.read(byte_count)
+ data += bytes_read
+ if len(bytes_read) < byte_count:
+ break
+ x += byte[0]
+
+ # align to 16-bit word boundary
+ if self.fd.tell() % 2 != 0:
+ self.fd.seek(1, os.SEEK_CUR)
+ rawmode = "L" if self.mode == "L" else "P"
+ self.set_as_raw(bytes(data), (rawmode, 0, self.args[-1]))
+ return -1, 0
+
+
+# =============================================================================
+# Image plugin for the DIB format (BMP alias)
+# =============================================================================
+class DibImageFile(BmpImageFile):
+ format = "DIB"
+ format_description = "Windows Bitmap"
+
+ def _open(self):
+ self._bitmap()
+
+
+#
+# --------------------------------------------------------------------
+# Write BMP file
+
+
+SAVE = {
+ "1": ("1", 1, 2),
+ "L": ("L", 8, 256),
+ "P": ("P", 8, 256),
+ "RGB": ("BGR", 24, 0),
+ "RGBA": ("BGRA", 32, 0),
+}
+
+
+def _dib_save(im, fp, filename):
+ _save(im, fp, filename, False)
+
+
+def _save(im, fp, filename, bitmap_header=True):
+ try:
+ rawmode, bits, colors = SAVE[im.mode]
+ except KeyError as e:
+ msg = f"cannot write mode {im.mode} as BMP"
+ raise OSError(msg) from e
+
+ info = im.encoderinfo
+
+ dpi = info.get("dpi", (96, 96))
+
+ # 1 meter == 39.3701 inches
+ ppm = tuple(int(x * 39.3701 + 0.5) for x in dpi)
+
+ stride = ((im.size[0] * bits + 7) // 8 + 3) & (~3)
+ header = 40 # or 64 for OS/2 version 2
+ image = stride * im.size[1]
+
+ if im.mode == "1":
+ palette = b"".join(o8(i) * 4 for i in (0, 255))
+ elif im.mode == "L":
+ palette = b"".join(o8(i) * 4 for i in range(256))
+ elif im.mode == "P":
+ palette = im.im.getpalette("RGB", "BGRX")
+ colors = len(palette) // 4
+ else:
+ palette = None
+
+ # bitmap header
+ if bitmap_header:
+ offset = 14 + header + colors * 4
+ file_size = offset + image
+ if file_size > 2**32 - 1:
+ msg = "File size is too large for the BMP format"
+ raise ValueError(msg)
+ fp.write(
+ b"BM" # file type (magic)
+ + o32(file_size) # file size
+ + o32(0) # reserved
+ + o32(offset) # image data offset
+ )
+
+ # bitmap info header
+ fp.write(
+ o32(header) # info header size
+ + o32(im.size[0]) # width
+ + o32(im.size[1]) # height
+ + o16(1) # planes
+ + o16(bits) # depth
+ + o32(0) # compression (0=uncompressed)
+ + o32(image) # size of bitmap
+ + o32(ppm[0]) # resolution
+ + o32(ppm[1]) # resolution
+ + o32(colors) # colors used
+ + o32(colors) # colors important
+ )
+
+ fp.write(b"\0" * (header - 40)) # padding (for OS/2 format)
+
+ if palette:
+ fp.write(palette)
+
+ ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, stride, -1))])
+
+
+#
+# --------------------------------------------------------------------
+# Registry
+
+
+Image.register_open(BmpImageFile.format, BmpImageFile, _accept)
+Image.register_save(BmpImageFile.format, _save)
+
+Image.register_extension(BmpImageFile.format, ".bmp")
+
+Image.register_mime(BmpImageFile.format, "image/bmp")
+
+Image.register_decoder("bmp_rle", BmpRleDecoder)
+
+Image.register_open(DibImageFile.format, DibImageFile, _dib_accept)
+Image.register_save(DibImageFile.format, _dib_save)
+
+Image.register_extension(DibImageFile.format, ".dib")
+
+Image.register_mime(DibImageFile.format, "image/bmp")
diff --git a/Lib/site-packages/PIL/BufrStubImagePlugin.py b/Lib/site-packages/PIL/BufrStubImagePlugin.py
new file mode 100644
index 0000000..60f3ec2
--- /dev/null
+++ b/Lib/site-packages/PIL/BufrStubImagePlugin.py
@@ -0,0 +1,74 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# BUFR stub adapter
+#
+# Copyright (c) 1996-2003 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from . import Image, ImageFile
+
+_handler = None
+
+
+def register_handler(handler):
+ """
+ Install application-specific BUFR image handler.
+
+ :param handler: Handler object.
+ """
+ global _handler
+ _handler = handler
+
+
+# --------------------------------------------------------------------
+# Image adapter
+
+
+def _accept(prefix):
+ return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC"
+
+
+class BufrStubImageFile(ImageFile.StubImageFile):
+ format = "BUFR"
+ format_description = "BUFR"
+
+ def _open(self):
+ offset = self.fp.tell()
+
+ if not _accept(self.fp.read(4)):
+ msg = "Not a BUFR file"
+ raise SyntaxError(msg)
+
+ self.fp.seek(offset)
+
+ # make something up
+ self._mode = "F"
+ self._size = 1, 1
+
+ loader = self._load()
+ if loader:
+ loader.open(self)
+
+ def _load(self):
+ return _handler
+
+
+def _save(im, fp, filename):
+ if _handler is None or not hasattr(_handler, "save"):
+ msg = "BUFR save handler not installed"
+ raise OSError(msg)
+ _handler.save(im, fp, filename)
+
+
+# --------------------------------------------------------------------
+# Registry
+
+Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept)
+Image.register_save(BufrStubImageFile.format, _save)
+
+Image.register_extension(BufrStubImageFile.format, ".bufr")
diff --git a/Lib/site-packages/PIL/ContainerIO.py b/Lib/site-packages/PIL/ContainerIO.py
new file mode 100644
index 0000000..0035296
--- /dev/null
+++ b/Lib/site-packages/PIL/ContainerIO.py
@@ -0,0 +1,121 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# a class to read from a container file
+#
+# History:
+# 1995-06-18 fl Created
+# 1995-09-07 fl Added readline(), readlines()
+#
+# Copyright (c) 1997-2001 by Secret Labs AB
+# Copyright (c) 1995 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import io
+from typing import IO, AnyStr, Generic, Literal
+
+
+class ContainerIO(Generic[AnyStr]):
+ """
+ A file object that provides read access to a part of an existing
+ file (for example a TAR file).
+ """
+
+ def __init__(self, file: IO[AnyStr], offset: int, length: int) -> None:
+ """
+ Create file object.
+
+ :param file: Existing file.
+ :param offset: Start of region, in bytes.
+ :param length: Size of region, in bytes.
+ """
+ self.fh: IO[AnyStr] = file
+ self.pos = 0
+ self.offset = offset
+ self.length = length
+ self.fh.seek(offset)
+
+ ##
+ # Always false.
+
+ def isatty(self) -> bool:
+ return False
+
+ def seek(self, offset: int, mode: Literal[0, 1, 2] = io.SEEK_SET) -> None:
+ """
+ Move file pointer.
+
+ :param offset: Offset in bytes.
+ :param mode: Starting position. Use 0 for beginning of region, 1
+ for current offset, and 2 for end of region. You cannot move
+ the pointer outside the defined region.
+ """
+ if mode == 1:
+ self.pos = self.pos + offset
+ elif mode == 2:
+ self.pos = self.length + offset
+ else:
+ self.pos = offset
+ # clamp
+ self.pos = max(0, min(self.pos, self.length))
+ self.fh.seek(self.offset + self.pos)
+
+ def tell(self) -> int:
+ """
+ Get current file pointer.
+
+ :returns: Offset from start of region, in bytes.
+ """
+ return self.pos
+
+ def read(self, n: int = 0) -> AnyStr:
+ """
+ Read data.
+
+ :param n: Number of bytes to read. If omitted or zero,
+ read until end of region.
+ :returns: An 8-bit string.
+ """
+ if n:
+ n = min(n, self.length - self.pos)
+ else:
+ n = self.length - self.pos
+ if not n: # EOF
+ return b"" if "b" in self.fh.mode else "" # type: ignore[return-value]
+ self.pos = self.pos + n
+ return self.fh.read(n)
+
+ def readline(self) -> AnyStr:
+ """
+ Read a line of text.
+
+ :returns: An 8-bit string.
+ """
+ s: AnyStr = b"" if "b" in self.fh.mode else "" # type: ignore[assignment]
+ newline_character = b"\n" if "b" in self.fh.mode else "\n"
+ while True:
+ c = self.read(1)
+ if not c:
+ break
+ s = s + c
+ if c == newline_character:
+ break
+ return s
+
+ def readlines(self) -> list[AnyStr]:
+ """
+ Read multiple lines of text.
+
+ :returns: A list of 8-bit strings.
+ """
+ lines = []
+ while True:
+ s = self.readline()
+ if not s:
+ break
+ lines.append(s)
+ return lines
diff --git a/Lib/site-packages/PIL/CurImagePlugin.py b/Lib/site-packages/PIL/CurImagePlugin.py
new file mode 100644
index 0000000..5fb2b01
--- /dev/null
+++ b/Lib/site-packages/PIL/CurImagePlugin.py
@@ -0,0 +1,75 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# Windows Cursor support for PIL
+#
+# notes:
+# uses BmpImagePlugin.py to read the bitmap data.
+#
+# history:
+# 96-05-27 fl Created
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from . import BmpImagePlugin, Image
+from ._binary import i16le as i16
+from ._binary import i32le as i32
+
+#
+# --------------------------------------------------------------------
+
+
+def _accept(prefix):
+ return prefix[:4] == b"\0\0\2\0"
+
+
+##
+# Image plugin for Windows Cursor files.
+
+
+class CurImageFile(BmpImagePlugin.BmpImageFile):
+ format = "CUR"
+ format_description = "Windows Cursor"
+
+ def _open(self):
+ offset = self.fp.tell()
+
+ # check magic
+ s = self.fp.read(6)
+ if not _accept(s):
+ msg = "not a CUR file"
+ raise SyntaxError(msg)
+
+ # pick the largest cursor in the file
+ m = b""
+ for i in range(i16(s, 4)):
+ s = self.fp.read(16)
+ if not m:
+ m = s
+ elif s[0] > m[0] and s[1] > m[1]:
+ m = s
+ if not m:
+ msg = "No cursors were found"
+ raise TypeError(msg)
+
+ # load as bitmap
+ self._bitmap(i32(m, 12) + offset)
+
+ # patch up the bitmap height
+ self._size = self.size[0], self.size[1] // 2
+ d, e, o, a = self.tile[0]
+ self.tile[0] = d, (0, 0) + self.size, o, a
+
+
+#
+# --------------------------------------------------------------------
+
+Image.register_open(CurImageFile.format, CurImageFile, _accept)
+
+Image.register_extension(CurImageFile.format, ".cur")
diff --git a/Lib/site-packages/PIL/DcxImagePlugin.py b/Lib/site-packages/PIL/DcxImagePlugin.py
new file mode 100644
index 0000000..f7344df
--- /dev/null
+++ b/Lib/site-packages/PIL/DcxImagePlugin.py
@@ -0,0 +1,80 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# DCX file handling
+#
+# DCX is a container file format defined by Intel, commonly used
+# for fax applications. Each DCX file consists of a directory
+# (a list of file offsets) followed by a set of (usually 1-bit)
+# PCX files.
+#
+# History:
+# 1995-09-09 fl Created
+# 1996-03-20 fl Properly derived from PcxImageFile.
+# 1998-07-15 fl Renamed offset attribute to avoid name clash
+# 2002-07-30 fl Fixed file handling
+#
+# Copyright (c) 1997-98 by Secret Labs AB.
+# Copyright (c) 1995-96 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from . import Image
+from ._binary import i32le as i32
+from .PcxImagePlugin import PcxImageFile
+
+MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then?
+
+
+def _accept(prefix):
+ return len(prefix) >= 4 and i32(prefix) == MAGIC
+
+
+##
+# Image plugin for the Intel DCX format.
+
+
+class DcxImageFile(PcxImageFile):
+ format = "DCX"
+ format_description = "Intel DCX"
+ _close_exclusive_fp_after_loading = False
+
+ def _open(self):
+ # Header
+ s = self.fp.read(4)
+ if not _accept(s):
+ msg = "not a DCX file"
+ raise SyntaxError(msg)
+
+ # Component directory
+ self._offset = []
+ for i in range(1024):
+ offset = i32(self.fp.read(4))
+ if not offset:
+ break
+ self._offset.append(offset)
+
+ self._fp = self.fp
+ self.frame = None
+ self.n_frames = len(self._offset)
+ self.is_animated = self.n_frames > 1
+ self.seek(0)
+
+ def seek(self, frame):
+ if not self._seek_check(frame):
+ return
+ self.frame = frame
+ self.fp = self._fp
+ self.fp.seek(self._offset[frame])
+ PcxImageFile._open(self)
+
+ def tell(self):
+ return self.frame
+
+
+Image.register_open(DcxImageFile.format, DcxImageFile, _accept)
+
+Image.register_extension(DcxImageFile.format, ".dcx")
diff --git a/Lib/site-packages/PIL/DdsImagePlugin.py b/Lib/site-packages/PIL/DdsImagePlugin.py
new file mode 100644
index 0000000..eb4c8f5
--- /dev/null
+++ b/Lib/site-packages/PIL/DdsImagePlugin.py
@@ -0,0 +1,566 @@
+"""
+A Pillow loader for .dds files (S3TC-compressed aka DXTC)
+Jerome Leclanche
+
+Documentation:
+https://web.archive.org/web/20170802060935/http://oss.sgi.com/projects/ogl-sample/registry/EXT/texture_compression_s3tc.txt
+
+The contents of this file are hereby released in the public domain (CC0)
+Full text of the CC0 license:
+https://creativecommons.org/publicdomain/zero/1.0/
+"""
+from __future__ import annotations
+
+import io
+import struct
+import sys
+from enum import IntEnum, IntFlag
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import i32le as i32
+from ._binary import o8
+from ._binary import o32le as o32
+
+# Magic ("DDS ")
+DDS_MAGIC = 0x20534444
+
+
+# DDS flags
+class DDSD(IntFlag):
+ CAPS = 0x1
+ HEIGHT = 0x2
+ WIDTH = 0x4
+ PITCH = 0x8
+ PIXELFORMAT = 0x1000
+ MIPMAPCOUNT = 0x20000
+ LINEARSIZE = 0x80000
+ DEPTH = 0x800000
+
+
+# DDS caps
+class DDSCAPS(IntFlag):
+ COMPLEX = 0x8
+ TEXTURE = 0x1000
+ MIPMAP = 0x400000
+
+
+class DDSCAPS2(IntFlag):
+ CUBEMAP = 0x200
+ CUBEMAP_POSITIVEX = 0x400
+ CUBEMAP_NEGATIVEX = 0x800
+ CUBEMAP_POSITIVEY = 0x1000
+ CUBEMAP_NEGATIVEY = 0x2000
+ CUBEMAP_POSITIVEZ = 0x4000
+ CUBEMAP_NEGATIVEZ = 0x8000
+ VOLUME = 0x200000
+
+
+# Pixel Format
+class DDPF(IntFlag):
+ ALPHAPIXELS = 0x1
+ ALPHA = 0x2
+ FOURCC = 0x4
+ PALETTEINDEXED8 = 0x20
+ RGB = 0x40
+ LUMINANCE = 0x20000
+
+
+# dxgiformat.h
+class DXGI_FORMAT(IntEnum):
+ UNKNOWN = 0
+ R32G32B32A32_TYPELESS = 1
+ R32G32B32A32_FLOAT = 2
+ R32G32B32A32_UINT = 3
+ R32G32B32A32_SINT = 4
+ R32G32B32_TYPELESS = 5
+ R32G32B32_FLOAT = 6
+ R32G32B32_UINT = 7
+ R32G32B32_SINT = 8
+ R16G16B16A16_TYPELESS = 9
+ R16G16B16A16_FLOAT = 10
+ R16G16B16A16_UNORM = 11
+ R16G16B16A16_UINT = 12
+ R16G16B16A16_SNORM = 13
+ R16G16B16A16_SINT = 14
+ R32G32_TYPELESS = 15
+ R32G32_FLOAT = 16
+ R32G32_UINT = 17
+ R32G32_SINT = 18
+ R32G8X24_TYPELESS = 19
+ D32_FLOAT_S8X24_UINT = 20
+ R32_FLOAT_X8X24_TYPELESS = 21
+ X32_TYPELESS_G8X24_UINT = 22
+ R10G10B10A2_TYPELESS = 23
+ R10G10B10A2_UNORM = 24
+ R10G10B10A2_UINT = 25
+ R11G11B10_FLOAT = 26
+ R8G8B8A8_TYPELESS = 27
+ R8G8B8A8_UNORM = 28
+ R8G8B8A8_UNORM_SRGB = 29
+ R8G8B8A8_UINT = 30
+ R8G8B8A8_SNORM = 31
+ R8G8B8A8_SINT = 32
+ R16G16_TYPELESS = 33
+ R16G16_FLOAT = 34
+ R16G16_UNORM = 35
+ R16G16_UINT = 36
+ R16G16_SNORM = 37
+ R16G16_SINT = 38
+ R32_TYPELESS = 39
+ D32_FLOAT = 40
+ R32_FLOAT = 41
+ R32_UINT = 42
+ R32_SINT = 43
+ R24G8_TYPELESS = 44
+ D24_UNORM_S8_UINT = 45
+ R24_UNORM_X8_TYPELESS = 46
+ X24_TYPELESS_G8_UINT = 47
+ R8G8_TYPELESS = 48
+ R8G8_UNORM = 49
+ R8G8_UINT = 50
+ R8G8_SNORM = 51
+ R8G8_SINT = 52
+ R16_TYPELESS = 53
+ R16_FLOAT = 54
+ D16_UNORM = 55
+ R16_UNORM = 56
+ R16_UINT = 57
+ R16_SNORM = 58
+ R16_SINT = 59
+ R8_TYPELESS = 60
+ R8_UNORM = 61
+ R8_UINT = 62
+ R8_SNORM = 63
+ R8_SINT = 64
+ A8_UNORM = 65
+ R1_UNORM = 66
+ R9G9B9E5_SHAREDEXP = 67
+ R8G8_B8G8_UNORM = 68
+ G8R8_G8B8_UNORM = 69
+ BC1_TYPELESS = 70
+ BC1_UNORM = 71
+ BC1_UNORM_SRGB = 72
+ BC2_TYPELESS = 73
+ BC2_UNORM = 74
+ BC2_UNORM_SRGB = 75
+ BC3_TYPELESS = 76
+ BC3_UNORM = 77
+ BC3_UNORM_SRGB = 78
+ BC4_TYPELESS = 79
+ BC4_UNORM = 80
+ BC4_SNORM = 81
+ BC5_TYPELESS = 82
+ BC5_UNORM = 83
+ BC5_SNORM = 84
+ B5G6R5_UNORM = 85
+ B5G5R5A1_UNORM = 86
+ B8G8R8A8_UNORM = 87
+ B8G8R8X8_UNORM = 88
+ R10G10B10_XR_BIAS_A2_UNORM = 89
+ B8G8R8A8_TYPELESS = 90
+ B8G8R8A8_UNORM_SRGB = 91
+ B8G8R8X8_TYPELESS = 92
+ B8G8R8X8_UNORM_SRGB = 93
+ BC6H_TYPELESS = 94
+ BC6H_UF16 = 95
+ BC6H_SF16 = 96
+ BC7_TYPELESS = 97
+ BC7_UNORM = 98
+ BC7_UNORM_SRGB = 99
+ AYUV = 100
+ Y410 = 101
+ Y416 = 102
+ NV12 = 103
+ P010 = 104
+ P016 = 105
+ OPAQUE_420 = 106
+ YUY2 = 107
+ Y210 = 108
+ Y216 = 109
+ NV11 = 110
+ AI44 = 111
+ IA44 = 112
+ P8 = 113
+ A8P8 = 114
+ B4G4R4A4_UNORM = 115
+ P208 = 130
+ V208 = 131
+ V408 = 132
+ SAMPLER_FEEDBACK_MIN_MIP_OPAQUE = 189
+ SAMPLER_FEEDBACK_MIP_REGION_USED_OPAQUE = 190
+
+
+class D3DFMT(IntEnum):
+ UNKNOWN = 0
+ R8G8B8 = 20
+ A8R8G8B8 = 21
+ X8R8G8B8 = 22
+ R5G6B5 = 23
+ X1R5G5B5 = 24
+ A1R5G5B5 = 25
+ A4R4G4B4 = 26
+ R3G3B2 = 27
+ A8 = 28
+ A8R3G3B2 = 29
+ X4R4G4B4 = 30
+ A2B10G10R10 = 31
+ A8B8G8R8 = 32
+ X8B8G8R8 = 33
+ G16R16 = 34
+ A2R10G10B10 = 35
+ A16B16G16R16 = 36
+ A8P8 = 40
+ P8 = 41
+ L8 = 50
+ A8L8 = 51
+ A4L4 = 52
+ V8U8 = 60
+ L6V5U5 = 61
+ X8L8V8U8 = 62
+ Q8W8V8U8 = 63
+ V16U16 = 64
+ A2W10V10U10 = 67
+ D16_LOCKABLE = 70
+ D32 = 71
+ D15S1 = 73
+ D24S8 = 75
+ D24X8 = 77
+ D24X4S4 = 79
+ D16 = 80
+ D32F_LOCKABLE = 82
+ D24FS8 = 83
+ D32_LOCKABLE = 84
+ S8_LOCKABLE = 85
+ L16 = 81
+ VERTEXDATA = 100
+ INDEX16 = 101
+ INDEX32 = 102
+ Q16W16V16U16 = 110
+ R16F = 111
+ G16R16F = 112
+ A16B16G16R16F = 113
+ R32F = 114
+ G32R32F = 115
+ A32B32G32R32F = 116
+ CxV8U8 = 117
+ A1 = 118
+ A2B10G10R10_XR_BIAS = 119
+ BINARYBUFFER = 199
+
+ UYVY = i32(b"UYVY")
+ R8G8_B8G8 = i32(b"RGBG")
+ YUY2 = i32(b"YUY2")
+ G8R8_G8B8 = i32(b"GRGB")
+ DXT1 = i32(b"DXT1")
+ DXT2 = i32(b"DXT2")
+ DXT3 = i32(b"DXT3")
+ DXT4 = i32(b"DXT4")
+ DXT5 = i32(b"DXT5")
+ DX10 = i32(b"DX10")
+ BC4S = i32(b"BC4S")
+ BC4U = i32(b"BC4U")
+ BC5S = i32(b"BC5S")
+ BC5U = i32(b"BC5U")
+ ATI1 = i32(b"ATI1")
+ ATI2 = i32(b"ATI2")
+ MULTI2_ARGB8 = i32(b"MET1")
+
+
+# Backward compatibility layer
+module = sys.modules[__name__]
+for item in DDSD:
+ setattr(module, "DDSD_" + item.name, item.value)
+for item in DDSCAPS:
+ setattr(module, "DDSCAPS_" + item.name, item.value)
+for item in DDSCAPS2:
+ setattr(module, "DDSCAPS2_" + item.name, item.value)
+for item in DDPF:
+ setattr(module, "DDPF_" + item.name, item.value)
+
+DDS_FOURCC = DDPF.FOURCC
+DDS_RGB = DDPF.RGB
+DDS_RGBA = DDPF.RGB | DDPF.ALPHAPIXELS
+DDS_LUMINANCE = DDPF.LUMINANCE
+DDS_LUMINANCEA = DDPF.LUMINANCE | DDPF.ALPHAPIXELS
+DDS_ALPHA = DDPF.ALPHA
+DDS_PAL8 = DDPF.PALETTEINDEXED8
+
+DDS_HEADER_FLAGS_TEXTURE = DDSD.CAPS | DDSD.HEIGHT | DDSD.WIDTH | DDSD.PIXELFORMAT
+DDS_HEADER_FLAGS_MIPMAP = DDSD.MIPMAPCOUNT
+DDS_HEADER_FLAGS_VOLUME = DDSD.DEPTH
+DDS_HEADER_FLAGS_PITCH = DDSD.PITCH
+DDS_HEADER_FLAGS_LINEARSIZE = DDSD.LINEARSIZE
+
+DDS_HEIGHT = DDSD.HEIGHT
+DDS_WIDTH = DDSD.WIDTH
+
+DDS_SURFACE_FLAGS_TEXTURE = DDSCAPS.TEXTURE
+DDS_SURFACE_FLAGS_MIPMAP = DDSCAPS.COMPLEX | DDSCAPS.MIPMAP
+DDS_SURFACE_FLAGS_CUBEMAP = DDSCAPS.COMPLEX
+
+DDS_CUBEMAP_POSITIVEX = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEX
+DDS_CUBEMAP_NEGATIVEX = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEX
+DDS_CUBEMAP_POSITIVEY = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEY
+DDS_CUBEMAP_NEGATIVEY = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEY
+DDS_CUBEMAP_POSITIVEZ = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEZ
+DDS_CUBEMAP_NEGATIVEZ = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEZ
+
+DXT1_FOURCC = D3DFMT.DXT1
+DXT3_FOURCC = D3DFMT.DXT3
+DXT5_FOURCC = D3DFMT.DXT5
+
+DXGI_FORMAT_R8G8B8A8_TYPELESS = DXGI_FORMAT.R8G8B8A8_TYPELESS
+DXGI_FORMAT_R8G8B8A8_UNORM = DXGI_FORMAT.R8G8B8A8_UNORM
+DXGI_FORMAT_R8G8B8A8_UNORM_SRGB = DXGI_FORMAT.R8G8B8A8_UNORM_SRGB
+DXGI_FORMAT_BC5_TYPELESS = DXGI_FORMAT.BC5_TYPELESS
+DXGI_FORMAT_BC5_UNORM = DXGI_FORMAT.BC5_UNORM
+DXGI_FORMAT_BC5_SNORM = DXGI_FORMAT.BC5_SNORM
+DXGI_FORMAT_BC6H_UF16 = DXGI_FORMAT.BC6H_UF16
+DXGI_FORMAT_BC6H_SF16 = DXGI_FORMAT.BC6H_SF16
+DXGI_FORMAT_BC7_TYPELESS = DXGI_FORMAT.BC7_TYPELESS
+DXGI_FORMAT_BC7_UNORM = DXGI_FORMAT.BC7_UNORM
+DXGI_FORMAT_BC7_UNORM_SRGB = DXGI_FORMAT.BC7_UNORM_SRGB
+
+
+class DdsImageFile(ImageFile.ImageFile):
+ format = "DDS"
+ format_description = "DirectDraw Surface"
+
+ def _open(self):
+ if not _accept(self.fp.read(4)):
+ msg = "not a DDS file"
+ raise SyntaxError(msg)
+        (header_size,) = struct.unpack("<I", self.fp.read(4))
+        if header_size != 124:
+            msg = f"Unsupported header size {repr(header_size)}"
+            raise OSError(msg)
+        header_bytes = self.fp.read(header_size - 4)
+        if len(header_bytes) != 120:
+            msg = f"Incomplete header: {len(header_bytes)} bytes"
+            raise OSError(msg)
+        header = io.BytesIO(header_bytes)
+
+        flags, height, width = struct.unpack("<3I", header.read(12))
+        self._size = (width, height)
+
+
+class DdsRgbDecoder(ImageFile.PyDecoder):
+    _pulls_fd = True
+
+    def decode(self, buffer):
+        bitcount, masks = self.args
+
+        # Some masks will be padded with zeros, e.g. R 0b11 G 0b1100
+        # Calculate how many zeros each mask is padded with,
+        # and the maximum value of each channel without the padding
+        mask_offsets = []
+        mask_totals = []
+        for mask in masks:
+            offset = 0
+            while mask >> (offset + 1) << (offset + 1) == mask:
+                offset += 1
+            mask_offsets.append(offset)
+            mask_totals.append(mask >> offset)
+ data = bytearray()
+ bytecount = bitcount // 8
+ while len(data) < self.state.xsize * self.state.ysize * len(masks):
+ value = int.from_bytes(self.fd.read(bytecount), "little")
+ for i, mask in enumerate(masks):
+ masked_value = value & mask
+ # Remove the zero padding, and scale it to 8 bits
+ data += o8(
+ int(((masked_value >> mask_offsets[i]) / mask_totals[i]) * 255)
+ )
+ self.set_as_raw(bytes(data))
+ return -1, 0
+
+
+def _save(im, fp, filename):
+ if im.mode not in ("RGB", "RGBA", "L", "LA"):
+ msg = f"cannot write mode {im.mode} as DDS"
+ raise OSError(msg)
+
+ alpha = im.mode[-1] == "A"
+ if im.mode[0] == "L":
+ pixel_flags = DDPF.LUMINANCE
+ rawmode = im.mode
+ if alpha:
+ rgba_mask = [0x000000FF, 0x000000FF, 0x000000FF]
+ else:
+ rgba_mask = [0xFF000000, 0xFF000000, 0xFF000000]
+ else:
+ pixel_flags = DDPF.RGB
+ rawmode = im.mode[::-1]
+ rgba_mask = [0x00FF0000, 0x0000FF00, 0x000000FF]
+
+        if alpha:
+            r, g, b, a = im.split()
+            im = Image.merge("RGBA", (a, r, g, b))
+ if alpha:
+ pixel_flags |= DDPF.ALPHAPIXELS
+ rgba_mask.append(0xFF000000 if alpha else 0)
+
+ flags = DDSD.CAPS | DDSD.HEIGHT | DDSD.WIDTH | DDSD.PITCH | DDSD.PIXELFORMAT
+ bitcount = len(im.getbands()) * 8
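+    # bytes per scanline, rounded up to a whole number of bytes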
+ pitch = (im.width * bitcount + 7) // 8
+
+ fp.write(
+ o32(DDS_MAGIC)
+ + struct.pack(
+ "<7I",
+ 124, # header size
+ flags, # flags
+ im.height,
+ im.width,
+ pitch,
+ 0, # depth
+ 0, # mipmaps
+ )
+ + struct.pack("11I", *((0,) * 11)) # reserved
+ # pfsize, pfflags, fourcc, bitcount
+ + struct.pack("<4I", 32, pixel_flags, 0, bitcount)
+ + struct.pack("<4I", *rgba_mask) # dwRGBABitMask
+ + struct.pack("<5I", DDSCAPS.TEXTURE, 0, 0, 0, 0)
+ )
+ ImageFile._save(
+ im, fp, [ImageFile._Tile("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))]
+ )
+
+
+def _accept(prefix):
+ return prefix[:4] == b"DDS "
+
+
+Image.register_open(DdsImageFile.format, DdsImageFile, _accept)
+Image.register_decoder("dds_rgb", DdsRgbDecoder)
+Image.register_save(DdsImageFile.format, _save)
+Image.register_extension(DdsImageFile.format, ".dds")
diff --git a/Lib/site-packages/PIL/EpsImagePlugin.py b/Lib/site-packages/PIL/EpsImagePlugin.py
new file mode 100644
index 0000000..d2e60aa
--- /dev/null
+++ b/Lib/site-packages/PIL/EpsImagePlugin.py
@@ -0,0 +1,478 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# EPS file handling
+#
+# History:
+# 1995-09-01 fl Created (0.1)
+# 1996-05-18 fl Don't choke on "atend" fields, Ghostscript interface (0.2)
+# 1996-08-22 fl Don't choke on floating point BoundingBox values
+# 1996-08-23 fl Handle files from Macintosh (0.3)
+# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4)
+# 2003-09-07 fl Check gs.close status (from Federico Di Gregorio) (0.5)
+# 2014-05-07 e Handling of EPS with binary preview and fixed resolution
+# resizing
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1995-2003 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import io
+import os
+import re
+import subprocess
+import sys
+import tempfile
+
+from . import Image, ImageFile
+from ._binary import i32le as i32
+from ._deprecate import deprecate
+
+# --------------------------------------------------------------------
+
+
+split = re.compile(r"^%%([^:]*):[ \t]*(.*)[ \t]*$")
+field = re.compile(r"^%[%!\w]([^:]*)[ \t]*$")
+
+gs_binary = None
+gs_windows_binary = None
+
+
+def has_ghostscript():
+ global gs_binary, gs_windows_binary
+ if gs_binary is None:
+ if sys.platform.startswith("win"):
+ if gs_windows_binary is None:
+ import shutil
+
+ for binary in ("gswin32c", "gswin64c", "gs"):
+ if shutil.which(binary) is not None:
+ gs_windows_binary = binary
+ break
+ else:
+ gs_windows_binary = False
+ gs_binary = gs_windows_binary
+ else:
+ try:
+ subprocess.check_call(["gs", "--version"], stdout=subprocess.DEVNULL)
+ gs_binary = "gs"
+ except OSError:
+ gs_binary = False
+ return gs_binary is not False
+
+
+def Ghostscript(tile, size, fp, scale=1, transparency=False):
+ """Render an image using Ghostscript"""
+ global gs_binary
+ if not has_ghostscript():
+ msg = "Unable to locate Ghostscript on paths"
+ raise OSError(msg)
+
+ # Unpack decoder tile
+ decoder, tile, offset, data = tile[0]
+ length, bbox = data
+
+ # Hack to support hi-res rendering
+ scale = int(scale) or 1
+ width = size[0] * scale
+ height = size[1] * scale
+ # resolution is dependent on bbox and size
+ res_x = 72.0 * width / (bbox[2] - bbox[0])
+ res_y = 72.0 * height / (bbox[3] - bbox[1])
+
+ out_fd, outfile = tempfile.mkstemp()
+ os.close(out_fd)
+
+ infile_temp = None
+ if hasattr(fp, "name") and os.path.exists(fp.name):
+ infile = fp.name
+ else:
+ in_fd, infile_temp = tempfile.mkstemp()
+ os.close(in_fd)
+ infile = infile_temp
+
+        # Ignore length and offset!
+        # Ghostscript can read it
+        # Copy whole file to read in Ghostscript
+        with open(infile_temp, "wb") as f:
+            # fetch length of fp
+            fp.seek(0, io.SEEK_END)
+            fsize = fp.tell()
+            # ensure start position
+            # go back
+            fp.seek(0)
+            lengthfile = fsize
+            while lengthfile > 0:
+                s = fp.read(min(lengthfile, 100 * 1024))
+                if not s:
+                    break
+                lengthfile -= len(s)
+                f.write(s)
+
+ device = "pngalpha" if transparency else "ppmraw"
+
+ # Build Ghostscript command
+ command = [
+ gs_binary,
+ "-q", # quiet mode
+ f"-g{width:d}x{height:d}", # set output geometry (pixels)
+ f"-r{res_x:f}x{res_y:f}", # set input DPI (dots per inch)
+ "-dBATCH", # exit after processing
+ "-dNOPAUSE", # don't pause between pages
+ "-dSAFER", # safe mode
+ f"-sDEVICE={device}",
+ f"-sOutputFile={outfile}", # output file
+ # adjust for image origin
+ "-c",
+ f"{-bbox[0]} {-bbox[1]} translate",
+ "-f",
+ infile, # input file
+ # showpage (see https://bugs.ghostscript.com/show_bug.cgi?id=698272)
+ "-c",
+ "showpage",
+ ]
+
+ # push data through Ghostscript
+ try:
+ startupinfo = None
+ if sys.platform.startswith("win"):
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+ subprocess.check_call(command, startupinfo=startupinfo)
+ out_im = Image.open(outfile)
+ out_im.load()
+ finally:
+ try:
+ os.unlink(outfile)
+ if infile_temp:
+ os.unlink(infile_temp)
+ except OSError:
+ pass
+
+ im = out_im.im.copy()
+ out_im.close()
+ return im
+
+
+class PSFile:
+ """
+    Wrapper for a BytesIO object that treats either CR or LF as end of line.
+ This class is no longer used internally, but kept for backwards compatibility.
+ """
+
+ def __init__(self, fp):
+ deprecate(
+ "PSFile",
+ 11,
+ action="If you need the functionality of this class "
+ "you will need to implement it yourself.",
+ )
+ self.fp = fp
+ self.char = None
+
+ def seek(self, offset, whence=io.SEEK_SET):
+ self.char = None
+ self.fp.seek(offset, whence)
+
+ def readline(self):
+ s = [self.char or b""]
+ self.char = None
+
+ c = self.fp.read(1)
+ while (c not in b"\r\n") and len(c):
+ s.append(c)
+ c = self.fp.read(1)
+
+ self.char = self.fp.read(1)
+ # line endings can be 1 or 2 of \r \n, in either order
+ if self.char in b"\r\n":
+ self.char = None
+
+ return b"".join(s).decode("latin-1")
+
+
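+# An EPS file starts either with "%!PS" in plain text, or with the DOS EPS
+# binary header, whose magic bytes b"\xc5\xd0\xd3\xc6" read little-endian as
+# 0xC6D3D0C5.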
+def _accept(prefix):
+ return prefix[:4] == b"%!PS" or (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5)
+
+
+##
+# Image plugin for Encapsulated PostScript. This plugin supports only
+# a few variants of this format.
+
+
+class EpsImageFile(ImageFile.ImageFile):
+ """EPS File Parser for the Python Imaging Library"""
+
+ format = "EPS"
+ format_description = "Encapsulated Postscript"
+
+ mode_map = {1: "L", 2: "LAB", 3: "RGB", 4: "CMYK"}
+
+ def _open(self):
+ (length, offset) = self._find_offset(self.fp)
+
+ # go to offset - start of "%!PS"
+ self.fp.seek(offset)
+
+ self._mode = "RGB"
+ self._size = None
+
+ byte_arr = bytearray(255)
+ bytes_mv = memoryview(byte_arr)
+ bytes_read = 0
+ reading_header_comments = True
+ reading_trailer_comments = False
+ trailer_reached = False
+
+ def check_required_header_comments():
+ if "PS-Adobe" not in self.info:
+ msg = 'EPS header missing "%!PS-Adobe" comment'
+ raise SyntaxError(msg)
+ if "BoundingBox" not in self.info:
+ msg = 'EPS header missing "%%BoundingBox" comment'
+ raise SyntaxError(msg)
+
+ def _read_comment(s):
+ nonlocal reading_trailer_comments
+ try:
+ m = split.match(s)
+ except re.error as e:
+ msg = "not an EPS file"
+ raise SyntaxError(msg) from e
+
+ if m:
+ k, v = m.group(1, 2)
+ self.info[k] = v
+ if k == "BoundingBox":
+ if v == "(atend)":
+ reading_trailer_comments = True
+ elif not self._size or (
+ trailer_reached and reading_trailer_comments
+ ):
+ try:
+ # Note: The DSC spec says that BoundingBox
+ # fields should be integers, but some drivers
+ # put floating point values there anyway.
+ box = [int(float(i)) for i in v.split()]
+ self._size = box[2] - box[0], box[3] - box[1]
+ self.tile = [
+ ("eps", (0, 0) + self.size, offset, (length, box))
+ ]
+ except Exception:
+ pass
+ return True
+
+ while True:
+ byte = self.fp.read(1)
+ if byte == b"":
+ # if we didn't read a byte we must be at the end of the file
+ if bytes_read == 0:
+ break
+ elif byte in b"\r\n":
+ # if we read a line ending character, ignore it and parse what
+ # we have already read. if we haven't read any other characters,
+ # continue reading
+ if bytes_read == 0:
+ continue
+ else:
+ # ASCII/hexadecimal lines in an EPS file must not exceed
+ # 255 characters, not including line ending characters
+ if bytes_read >= 255:
+ # only enforce this for lines starting with a "%",
+ # otherwise assume it's binary data
+ if byte_arr[0] == ord("%"):
+ msg = "not an EPS file"
+ raise SyntaxError(msg)
+ else:
+ if reading_header_comments:
+ check_required_header_comments()
+ reading_header_comments = False
+ # reset bytes_read so we can keep reading
+ # data until the end of the line
+ bytes_read = 0
+ byte_arr[bytes_read] = byte[0]
+ bytes_read += 1
+ continue
+
+ if reading_header_comments:
+ # Load EPS header
+
+ # if this line doesn't start with a "%",
+ # or does start with "%%EndComments",
+ # then we've reached the end of the header/comments
+ if byte_arr[0] != ord("%") or bytes_mv[:13] == b"%%EndComments":
+ check_required_header_comments()
+ reading_header_comments = False
+ continue
+
+ s = str(bytes_mv[:bytes_read], "latin-1")
+ if not _read_comment(s):
+ m = field.match(s)
+ if m:
+ k = m.group(1)
+ if k[:8] == "PS-Adobe":
+ self.info["PS-Adobe"] = k[9:]
+ else:
+ self.info[k] = ""
+ elif s[0] == "%":
+ # handle non-DSC PostScript comments that some
+ # tools mistakenly put in the Comments section
+ pass
+ else:
+ msg = "bad EPS header"
+ raise OSError(msg)
+ elif bytes_mv[:11] == b"%ImageData:":
+ # Check for an "ImageData" descriptor
+ # https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577413_pgfId-1035096
+
+ # Values:
+ # columns
+ # rows
+ # bit depth (1 or 8)
+ # mode (1: L, 2: LAB, 3: RGB, 4: CMYK)
+ # number of padding channels
+ # block size (number of bytes per row per channel)
+ # binary/ascii (1: binary, 2: ascii)
+ # data start identifier (the image data follows after a single line
+ # consisting only of this quoted value)
+ image_data_values = byte_arr[11:bytes_read].split(None, 7)
+ columns, rows, bit_depth, mode_id = (
+ int(value) for value in image_data_values[:4]
+ )
+
+ if bit_depth == 1:
+ self._mode = "1"
+ elif bit_depth == 8:
+ try:
+ self._mode = self.mode_map[mode_id]
+                    except KeyError:
+ break
+ else:
+ break
+
+ self._size = columns, rows
+ return
+ elif trailer_reached and reading_trailer_comments:
+ # Load EPS trailer
+
+ # if this line starts with "%%EOF",
+ # then we've reached the end of the file
+ if bytes_mv[:5] == b"%%EOF":
+ break
+
+ s = str(bytes_mv[:bytes_read], "latin-1")
+ _read_comment(s)
+ elif bytes_mv[:9] == b"%%Trailer":
+ trailer_reached = True
+ bytes_read = 0
+
+ check_required_header_comments()
+
+ if not self._size:
+ msg = "cannot determine EPS bounding box"
+ raise OSError(msg)
+
+ def _find_offset(self, fp):
+ s = fp.read(4)
+
+ if s == b"%!PS":
+ # for HEAD without binary preview
+ fp.seek(0, io.SEEK_END)
+ length = fp.tell()
+ offset = 0
+ elif i32(s) == 0xC6D3D0C5:
+ # FIX for: Some EPS file not handled correctly / issue #302
+ # EPS can contain binary data
+ # or start directly with latin coding
+ # more info see:
+ # https://web.archive.org/web/20160528181353/http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf
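+            # bytes 4-7 of the header hold the offset of the PostScript
+            # section, bytes 8-11 its length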
+ s = fp.read(8)
+ offset = i32(s)
+ length = i32(s, 4)
+ else:
+ msg = "not an EPS file"
+ raise SyntaxError(msg)
+
+ return length, offset
+
+ def load(self, scale=1, transparency=False):
+ # Load EPS via Ghostscript
+ if self.tile:
+ self.im = Ghostscript(self.tile, self.size, self.fp, scale, transparency)
+ self._mode = self.im.mode
+ self._size = self.im.size
+ self.tile = []
+ return Image.Image.load(self)
+
+ def load_seek(self, *args, **kwargs):
+ # we can't incrementally load, so force ImageFile.parser to
+ # use our custom load method by defining this method.
+ pass
+
+
+# --------------------------------------------------------------------
+
+
+def _save(im, fp, filename, eps=1):
+ """EPS Writer for the Python Imaging Library."""
+
+ # make sure image data is available
+ im.load()
+
+ # determine PostScript image mode
+ if im.mode == "L":
+ operator = (8, 1, b"image")
+ elif im.mode == "RGB":
+ operator = (8, 3, b"false 3 colorimage")
+ elif im.mode == "CMYK":
+ operator = (8, 4, b"false 4 colorimage")
+ else:
+ msg = "image mode is not supported"
+ raise ValueError(msg)
+
+ if eps:
+ # write EPS header
+ fp.write(b"%!PS-Adobe-3.0 EPSF-3.0\n")
+ fp.write(b"%%Creator: PIL 0.1 EpsEncode\n")
+ # fp.write("%%CreationDate: %s"...)
+ fp.write(b"%%%%BoundingBox: 0 0 %d %d\n" % im.size)
+ fp.write(b"%%Pages: 1\n")
+ fp.write(b"%%EndComments\n")
+ fp.write(b"%%Page: 1 1\n")
+ fp.write(b"%%ImageData: %d %d " % im.size)
+ fp.write(b'%d %d 0 1 1 "%s"\n' % operator)
+
+ # image header
+ fp.write(b"gsave\n")
+ fp.write(b"10 dict begin\n")
+ fp.write(b"/buf %d string def\n" % (im.size[0] * operator[1]))
+ fp.write(b"%d %d scale\n" % im.size)
+ fp.write(b"%d %d 8\n" % im.size) # <= bits
+ fp.write(b"[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1]))
+ fp.write(b"{ currentfile buf readhexstring pop } bind\n")
+ fp.write(operator[2] + b"\n")
+ if hasattr(fp, "flush"):
+ fp.flush()
+
+ ImageFile._save(im, fp, [("eps", (0, 0) + im.size, 0, None)])
+
+ fp.write(b"\n%%%%EndBinary\n")
+ fp.write(b"grestore end\n")
+ if hasattr(fp, "flush"):
+ fp.flush()
+
+
+# --------------------------------------------------------------------
+
+
+Image.register_open(EpsImageFile.format, EpsImageFile, _accept)
+
+Image.register_save(EpsImageFile.format, _save)
+
+Image.register_extensions(EpsImageFile.format, [".ps", ".eps"])
+
+Image.register_mime(EpsImageFile.format, "application/postscript")
diff --git a/Lib/site-packages/PIL/ExifTags.py b/Lib/site-packages/PIL/ExifTags.py
new file mode 100644
index 0000000..60a4d97
--- /dev/null
+++ b/Lib/site-packages/PIL/ExifTags.py
@@ -0,0 +1,381 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# EXIF tags
+#
+# Copyright (c) 2003 by Secret Labs AB
+#
+# See the README file for information on usage and redistribution.
+#
+
+"""
+This module provides constants and clear-text names for various
+well-known EXIF tags.
+"""
+from __future__ import annotations
+
+from enum import IntEnum
+
+
+class Base(IntEnum):
+ # possibly incomplete
+ InteropIndex = 0x0001
+ ProcessingSoftware = 0x000B
+ NewSubfileType = 0x00FE
+ SubfileType = 0x00FF
+ ImageWidth = 0x0100
+ ImageLength = 0x0101
+ BitsPerSample = 0x0102
+ Compression = 0x0103
+ PhotometricInterpretation = 0x0106
+ Thresholding = 0x0107
+ CellWidth = 0x0108
+ CellLength = 0x0109
+ FillOrder = 0x010A
+ DocumentName = 0x010D
+ ImageDescription = 0x010E
+ Make = 0x010F
+ Model = 0x0110
+ StripOffsets = 0x0111
+ Orientation = 0x0112
+ SamplesPerPixel = 0x0115
+ RowsPerStrip = 0x0116
+ StripByteCounts = 0x0117
+ MinSampleValue = 0x0118
+ MaxSampleValue = 0x0119
+ XResolution = 0x011A
+ YResolution = 0x011B
+ PlanarConfiguration = 0x011C
+ PageName = 0x011D
+ FreeOffsets = 0x0120
+ FreeByteCounts = 0x0121
+ GrayResponseUnit = 0x0122
+ GrayResponseCurve = 0x0123
+ T4Options = 0x0124
+ T6Options = 0x0125
+ ResolutionUnit = 0x0128
+ PageNumber = 0x0129
+ TransferFunction = 0x012D
+ Software = 0x0131
+ DateTime = 0x0132
+ Artist = 0x013B
+ HostComputer = 0x013C
+ Predictor = 0x013D
+ WhitePoint = 0x013E
+ PrimaryChromaticities = 0x013F
+ ColorMap = 0x0140
+ HalftoneHints = 0x0141
+ TileWidth = 0x0142
+ TileLength = 0x0143
+ TileOffsets = 0x0144
+ TileByteCounts = 0x0145
+ SubIFDs = 0x014A
+ InkSet = 0x014C
+ InkNames = 0x014D
+ NumberOfInks = 0x014E
+ DotRange = 0x0150
+ TargetPrinter = 0x0151
+ ExtraSamples = 0x0152
+ SampleFormat = 0x0153
+ SMinSampleValue = 0x0154
+ SMaxSampleValue = 0x0155
+ TransferRange = 0x0156
+ ClipPath = 0x0157
+ XClipPathUnits = 0x0158
+ YClipPathUnits = 0x0159
+ Indexed = 0x015A
+ JPEGTables = 0x015B
+ OPIProxy = 0x015F
+ JPEGProc = 0x0200
+ JpegIFOffset = 0x0201
+ JpegIFByteCount = 0x0202
+ JpegRestartInterval = 0x0203
+ JpegLosslessPredictors = 0x0205
+ JpegPointTransforms = 0x0206
+ JpegQTables = 0x0207
+ JpegDCTables = 0x0208
+ JpegACTables = 0x0209
+ YCbCrCoefficients = 0x0211
+ YCbCrSubSampling = 0x0212
+ YCbCrPositioning = 0x0213
+ ReferenceBlackWhite = 0x0214
+ XMLPacket = 0x02BC
+ RelatedImageFileFormat = 0x1000
+ RelatedImageWidth = 0x1001
+ RelatedImageLength = 0x1002
+ Rating = 0x4746
+ RatingPercent = 0x4749
+ ImageID = 0x800D
+ CFARepeatPatternDim = 0x828D
+ BatteryLevel = 0x828F
+ Copyright = 0x8298
+ ExposureTime = 0x829A
+ FNumber = 0x829D
+ IPTCNAA = 0x83BB
+ ImageResources = 0x8649
+ ExifOffset = 0x8769
+ InterColorProfile = 0x8773
+ ExposureProgram = 0x8822
+ SpectralSensitivity = 0x8824
+ GPSInfo = 0x8825
+ ISOSpeedRatings = 0x8827
+ OECF = 0x8828
+ Interlace = 0x8829
+ TimeZoneOffset = 0x882A
+ SelfTimerMode = 0x882B
+ SensitivityType = 0x8830
+ StandardOutputSensitivity = 0x8831
+ RecommendedExposureIndex = 0x8832
+ ISOSpeed = 0x8833
+ ISOSpeedLatitudeyyy = 0x8834
+ ISOSpeedLatitudezzz = 0x8835
+ ExifVersion = 0x9000
+ DateTimeOriginal = 0x9003
+ DateTimeDigitized = 0x9004
+ OffsetTime = 0x9010
+ OffsetTimeOriginal = 0x9011
+ OffsetTimeDigitized = 0x9012
+ ComponentsConfiguration = 0x9101
+ CompressedBitsPerPixel = 0x9102
+ ShutterSpeedValue = 0x9201
+ ApertureValue = 0x9202
+ BrightnessValue = 0x9203
+ ExposureBiasValue = 0x9204
+ MaxApertureValue = 0x9205
+ SubjectDistance = 0x9206
+ MeteringMode = 0x9207
+ LightSource = 0x9208
+ Flash = 0x9209
+ FocalLength = 0x920A
+ Noise = 0x920D
+ ImageNumber = 0x9211
+ SecurityClassification = 0x9212
+ ImageHistory = 0x9213
+ TIFFEPStandardID = 0x9216
+ MakerNote = 0x927C
+ UserComment = 0x9286
+ SubsecTime = 0x9290
+ SubsecTimeOriginal = 0x9291
+ SubsecTimeDigitized = 0x9292
+ AmbientTemperature = 0x9400
+ Humidity = 0x9401
+ Pressure = 0x9402
+ WaterDepth = 0x9403
+ Acceleration = 0x9404
+ CameraElevationAngle = 0x9405
+ XPTitle = 0x9C9B
+ XPComment = 0x9C9C
+ XPAuthor = 0x9C9D
+ XPKeywords = 0x9C9E
+ XPSubject = 0x9C9F
+ FlashPixVersion = 0xA000
+ ColorSpace = 0xA001
+ ExifImageWidth = 0xA002
+ ExifImageHeight = 0xA003
+ RelatedSoundFile = 0xA004
+ ExifInteroperabilityOffset = 0xA005
+ FlashEnergy = 0xA20B
+ SpatialFrequencyResponse = 0xA20C
+ FocalPlaneXResolution = 0xA20E
+ FocalPlaneYResolution = 0xA20F
+ FocalPlaneResolutionUnit = 0xA210
+ SubjectLocation = 0xA214
+ ExposureIndex = 0xA215
+ SensingMethod = 0xA217
+ FileSource = 0xA300
+ SceneType = 0xA301
+ CFAPattern = 0xA302
+ CustomRendered = 0xA401
+ ExposureMode = 0xA402
+ WhiteBalance = 0xA403
+ DigitalZoomRatio = 0xA404
+ FocalLengthIn35mmFilm = 0xA405
+ SceneCaptureType = 0xA406
+ GainControl = 0xA407
+ Contrast = 0xA408
+ Saturation = 0xA409
+ Sharpness = 0xA40A
+ DeviceSettingDescription = 0xA40B
+ SubjectDistanceRange = 0xA40C
+ ImageUniqueID = 0xA420
+ CameraOwnerName = 0xA430
+ BodySerialNumber = 0xA431
+ LensSpecification = 0xA432
+ LensMake = 0xA433
+ LensModel = 0xA434
+ LensSerialNumber = 0xA435
+ CompositeImage = 0xA460
+ CompositeImageCount = 0xA461
+ CompositeImageExposureTimes = 0xA462
+ Gamma = 0xA500
+ PrintImageMatching = 0xC4A5
+ DNGVersion = 0xC612
+ DNGBackwardVersion = 0xC613
+ UniqueCameraModel = 0xC614
+ LocalizedCameraModel = 0xC615
+ CFAPlaneColor = 0xC616
+ CFALayout = 0xC617
+ LinearizationTable = 0xC618
+ BlackLevelRepeatDim = 0xC619
+ BlackLevel = 0xC61A
+ BlackLevelDeltaH = 0xC61B
+ BlackLevelDeltaV = 0xC61C
+ WhiteLevel = 0xC61D
+ DefaultScale = 0xC61E
+ DefaultCropOrigin = 0xC61F
+ DefaultCropSize = 0xC620
+ ColorMatrix1 = 0xC621
+ ColorMatrix2 = 0xC622
+ CameraCalibration1 = 0xC623
+ CameraCalibration2 = 0xC624
+ ReductionMatrix1 = 0xC625
+ ReductionMatrix2 = 0xC626
+ AnalogBalance = 0xC627
+ AsShotNeutral = 0xC628
+ AsShotWhiteXY = 0xC629
+ BaselineExposure = 0xC62A
+ BaselineNoise = 0xC62B
+ BaselineSharpness = 0xC62C
+ BayerGreenSplit = 0xC62D
+ LinearResponseLimit = 0xC62E
+ CameraSerialNumber = 0xC62F
+ LensInfo = 0xC630
+ ChromaBlurRadius = 0xC631
+ AntiAliasStrength = 0xC632
+ ShadowScale = 0xC633
+ DNGPrivateData = 0xC634
+ MakerNoteSafety = 0xC635
+ CalibrationIlluminant1 = 0xC65A
+ CalibrationIlluminant2 = 0xC65B
+ BestQualityScale = 0xC65C
+ RawDataUniqueID = 0xC65D
+ OriginalRawFileName = 0xC68B
+ OriginalRawFileData = 0xC68C
+ ActiveArea = 0xC68D
+ MaskedAreas = 0xC68E
+ AsShotICCProfile = 0xC68F
+ AsShotPreProfileMatrix = 0xC690
+ CurrentICCProfile = 0xC691
+ CurrentPreProfileMatrix = 0xC692
+ ColorimetricReference = 0xC6BF
+ CameraCalibrationSignature = 0xC6F3
+ ProfileCalibrationSignature = 0xC6F4
+ AsShotProfileName = 0xC6F6
+ NoiseReductionApplied = 0xC6F7
+ ProfileName = 0xC6F8
+ ProfileHueSatMapDims = 0xC6F9
+ ProfileHueSatMapData1 = 0xC6FA
+ ProfileHueSatMapData2 = 0xC6FB
+ ProfileToneCurve = 0xC6FC
+ ProfileEmbedPolicy = 0xC6FD
+ ProfileCopyright = 0xC6FE
+ ForwardMatrix1 = 0xC714
+ ForwardMatrix2 = 0xC715
+ PreviewApplicationName = 0xC716
+ PreviewApplicationVersion = 0xC717
+ PreviewSettingsName = 0xC718
+ PreviewSettingsDigest = 0xC719
+ PreviewColorSpace = 0xC71A
+ PreviewDateTime = 0xC71B
+ RawImageDigest = 0xC71C
+ OriginalRawFileDigest = 0xC71D
+ SubTileBlockSize = 0xC71E
+ RowInterleaveFactor = 0xC71F
+ ProfileLookTableDims = 0xC725
+ ProfileLookTableData = 0xC726
+ OpcodeList1 = 0xC740
+ OpcodeList2 = 0xC741
+ OpcodeList3 = 0xC74E
+ NoiseProfile = 0xC761
+
+
+"""Maps EXIF tags to tag names."""
+TAGS = {
+ **{i.value: i.name for i in Base},
+ 0x920C: "SpatialFrequencyResponse",
+ 0x9214: "SubjectLocation",
+ 0x9215: "ExposureIndex",
+ 0x828E: "CFAPattern",
+ 0x920B: "FlashEnergy",
+ 0x9216: "TIFF/EPStandardID",
+}
+
+
+class GPS(IntEnum):
+ GPSVersionID = 0
+ GPSLatitudeRef = 1
+ GPSLatitude = 2
+ GPSLongitudeRef = 3
+ GPSLongitude = 4
+ GPSAltitudeRef = 5
+ GPSAltitude = 6
+ GPSTimeStamp = 7
+ GPSSatellites = 8
+ GPSStatus = 9
+ GPSMeasureMode = 10
+ GPSDOP = 11
+ GPSSpeedRef = 12
+ GPSSpeed = 13
+ GPSTrackRef = 14
+ GPSTrack = 15
+ GPSImgDirectionRef = 16
+ GPSImgDirection = 17
+ GPSMapDatum = 18
+ GPSDestLatitudeRef = 19
+ GPSDestLatitude = 20
+ GPSDestLongitudeRef = 21
+ GPSDestLongitude = 22
+ GPSDestBearingRef = 23
+ GPSDestBearing = 24
+ GPSDestDistanceRef = 25
+ GPSDestDistance = 26
+ GPSProcessingMethod = 27
+ GPSAreaInformation = 28
+ GPSDateStamp = 29
+ GPSDifferential = 30
+ GPSHPositioningError = 31
+
+
+"""Maps EXIF GPS tags to tag names."""
+GPSTAGS = {i.value: i.name for i in GPS}
+
+
+class Interop(IntEnum):
+ InteropIndex = 1
+ InteropVersion = 2
+ RelatedImageFileFormat = 4096
+ RelatedImageWidth = 4097
+    RelatedImageHeight = 4098
+
+
+class IFD(IntEnum):
+ Exif = 34665
+ GPSInfo = 34853
+ Makernote = 37500
+ Interop = 40965
+ IFD1 = -1
+
+
+class LightSource(IntEnum):
+ Unknown = 0
+ Daylight = 1
+ Fluorescent = 2
+ Tungsten = 3
+ Flash = 4
+ Fine = 9
+ Cloudy = 10
+ Shade = 11
+ DaylightFluorescent = 12
+ DayWhiteFluorescent = 13
+ CoolWhiteFluorescent = 14
+ WhiteFluorescent = 15
+ StandardLightA = 17
+ StandardLightB = 18
+ StandardLightC = 19
+ D55 = 20
+ D65 = 21
+ D75 = 22
+ D50 = 23
+ ISO = 24
+ Other = 255
diff --git a/Lib/site-packages/PIL/FitsImagePlugin.py b/Lib/site-packages/PIL/FitsImagePlugin.py
new file mode 100644
index 0000000..7dce2d6
--- /dev/null
+++ b/Lib/site-packages/PIL/FitsImagePlugin.py
@@ -0,0 +1,72 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# FITS file handling
+#
+# Copyright (c) 1998-2003 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import math
+
+from . import Image, ImageFile
+
+
+def _accept(prefix):
+ return prefix[:6] == b"SIMPLE"
+
+
+class FitsImageFile(ImageFile.ImageFile):
+ format = "FITS"
+ format_description = "FITS"
+
+ def _open(self):
+ headers = {}
+ while True:
+ header = self.fp.read(80)
+ if not header:
+ msg = "Truncated FITS file"
+ raise OSError(msg)
+ keyword = header[:8].strip()
+ if keyword == b"END":
+ break
+ value = header[8:].split(b"/")[0].strip()
+ if value.startswith(b"="):
+ value = value[1:].strip()
+ if not headers and (not _accept(keyword) or value != b"T"):
+ msg = "Not a FITS file"
+ raise SyntaxError(msg)
+ headers[keyword] = value
+
+ naxis = int(headers[b"NAXIS"])
+ if naxis == 0:
+ msg = "No image data"
+ raise ValueError(msg)
+ elif naxis == 1:
+ self._size = 1, int(headers[b"NAXIS1"])
+ else:
+ self._size = int(headers[b"NAXIS1"]), int(headers[b"NAXIS2"])
+
+ number_of_bits = int(headers[b"BITPIX"])
+ if number_of_bits == 8:
+ self._mode = "L"
+ elif number_of_bits == 16:
+ self._mode = "I"
+ elif number_of_bits == 32:
+ self._mode = "I"
+ elif number_of_bits in (-32, -64):
+ self._mode = "F"
+
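+        # FITS files are organised in 2880-byte blocks; pixel data starts at
+        # the first block boundary after the header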
+ offset = math.ceil(self.fp.tell() / 2880) * 2880
+ self.tile = [("raw", (0, 0) + self.size, offset, (self.mode, 0, -1))]
+
+
+# --------------------------------------------------------------------
+# Registry
+
+Image.register_open(FitsImageFile.format, FitsImageFile, _accept)
+
+Image.register_extensions(FitsImageFile.format, [".fit", ".fits"])
diff --git a/Lib/site-packages/PIL/FliImagePlugin.py b/Lib/site-packages/PIL/FliImagePlugin.py
new file mode 100644
index 0000000..9769761
--- /dev/null
+++ b/Lib/site-packages/PIL/FliImagePlugin.py
@@ -0,0 +1,173 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# FLI/FLC file handling.
+#
+# History:
+# 95-09-01 fl Created
+# 97-01-03 fl Fixed parser, setup decoder tile
+# 98-07-15 fl Renamed offset attribute to avoid name clash
+#
+# Copyright (c) Secret Labs AB 1997-98.
+# Copyright (c) Fredrik Lundh 1995-97.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import os
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import i16le as i16
+from ._binary import i32le as i32
+from ._binary import o8
+
+#
+# decoder
+
+
+def _accept(prefix):
+ return (
+ len(prefix) >= 6
+ and i16(prefix, 4) in [0xAF11, 0xAF12]
+ and i16(prefix, 14) in [0, 3] # flags
+ )
+
+
+##
+# Image plugin for the FLI/FLC animation format. Use the seek
+# method to load individual frames.
+
+
+class FliImageFile(ImageFile.ImageFile):
+ format = "FLI"
+ format_description = "Autodesk FLI/FLC Animation"
+ _close_exclusive_fp_after_loading = False
+
+ def _open(self):
+ # HEAD
+ s = self.fp.read(128)
+ if not (_accept(s) and s[20:22] == b"\x00\x00"):
+ msg = "not an FLI/FLC file"
+ raise SyntaxError(msg)
+
+ # frames
+ self.n_frames = i16(s, 6)
+ self.is_animated = self.n_frames > 1
+
+ # image characteristics
+ self._mode = "P"
+ self._size = i16(s, 8), i16(s, 10)
+
+ # animation speed
+ duration = i32(s, 16)
+ magic = i16(s, 4)
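+        # FLI (magic 0xAF11) stores the frame delay in 1/70 s ticks;
+        # FLC (0xAF12) already uses milliseconds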
+ if magic == 0xAF11:
+ duration = (duration * 1000) // 70
+ self.info["duration"] = duration
+
+ # look for palette
+ palette = [(a, a, a) for a in range(256)]
+
+ s = self.fp.read(16)
+
+ self.__offset = 128
+
+ if i16(s, 4) == 0xF100:
+ # prefix chunk; ignore it
+ self.__offset = self.__offset + i32(s)
+ s = self.fp.read(16)
+
+ if i16(s, 4) == 0xF1FA:
+ # look for palette chunk
+ number_of_subchunks = i16(s, 6)
+ chunk_size = None
+ for _ in range(number_of_subchunks):
+ if chunk_size is not None:
+ self.fp.seek(chunk_size - 6, os.SEEK_CUR)
+ s = self.fp.read(6)
+ chunk_type = i16(s, 4)
+ if chunk_type in (4, 11):
+ self._palette(palette, 2 if chunk_type == 11 else 0)
+ break
+ chunk_size = i32(s)
+ if not chunk_size:
+ break
+
+ palette = [o8(r) + o8(g) + o8(b) for (r, g, b) in palette]
+ self.palette = ImagePalette.raw("RGB", b"".join(palette))
+
+ # set things up to decode first frame
+ self.__frame = -1
+ self._fp = self.fp
+ self.__rewind = self.fp.tell()
+ self.seek(0)
+
+ def _palette(self, palette, shift):
+ # load palette
+
+ i = 0
+ for e in range(i16(self.fp.read(2))):
+ s = self.fp.read(2)
+ i = i + s[0]
+ n = s[1]
+ if n == 0:
+ n = 256
+ s = self.fp.read(n * 3)
+ for n in range(0, len(s), 3):
+ r = s[n] << shift
+ g = s[n + 1] << shift
+ b = s[n + 2] << shift
+ palette[i] = (r, g, b)
+ i += 1
+
+ def seek(self, frame):
+ if not self._seek_check(frame):
+ return
+ if frame < self.__frame:
+ self._seek(0)
+
+ for f in range(self.__frame + 1, frame + 1):
+ self._seek(f)
+
+ def _seek(self, frame):
+ if frame == 0:
+ self.__frame = -1
+ self._fp.seek(self.__rewind)
+ self.__offset = 128
+ else:
+ # ensure that the previous frame was loaded
+ self.load()
+
+ if frame != self.__frame + 1:
+ msg = f"cannot seek to frame {frame}"
+ raise ValueError(msg)
+ self.__frame = frame
+
+ # move to next frame
+ self.fp = self._fp
+ self.fp.seek(self.__offset)
+
+ s = self.fp.read(4)
+ if not s:
+ msg = "missing frame size"
+ raise EOFError(msg)
+
+ framesize = i32(s)
+
+ self.decodermaxblock = framesize
+ self.tile = [("fli", (0, 0) + self.size, self.__offset, None)]
+
+ self.__offset += framesize
+
+ def tell(self):
+ return self.__frame
+
+
+#
+# registry
+
+Image.register_open(FliImageFile.format, FliImageFile, _accept)
+
+Image.register_extensions(FliImageFile.format, [".fli", ".flc"])
diff --git a/Lib/site-packages/PIL/FontFile.py b/Lib/site-packages/PIL/FontFile.py
new file mode 100644
index 0000000..3ec1ae8
--- /dev/null
+++ b/Lib/site-packages/PIL/FontFile.py
@@ -0,0 +1,136 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# base class for raster font file parsers
+#
+# history:
+# 1997-06-05 fl created
+# 1997-08-19 fl restrict image width
+#
+# Copyright (c) 1997-1998 by Secret Labs AB
+# Copyright (c) 1997-1998 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import os
+from typing import BinaryIO
+
+from . import Image, _binary
+
+WIDTH = 800
+
+
+def puti16(
+ fp: BinaryIO, values: tuple[int, int, int, int, int, int, int, int, int, int]
+) -> None:
+ """Write network order (big-endian) 16-bit sequence"""
+ for v in values:
+ if v < 0:
+ v += 65536
+ fp.write(_binary.o16be(v))
+
+
+class FontFile:
+ """Base class for raster font file handlers."""
+
+ bitmap: Image.Image | None = None
+
+ def __init__(self) -> None:
+ self.info: dict[bytes, bytes | int] = {}
+ self.glyph: list[
+ tuple[
+ tuple[int, int],
+ tuple[int, int, int, int],
+ tuple[int, int, int, int],
+ Image.Image,
+ ]
+ | None
+ ] = [None] * 256
+
+ def __getitem__(
+ self, ix: int
+ ) -> (
+ tuple[
+ tuple[int, int],
+ tuple[int, int, int, int],
+ tuple[int, int, int, int],
+ Image.Image,
+ ]
+ | None
+ ):
+ return self.glyph[ix]
+
+ def compile(self) -> None:
+ """Create metrics and bitmap"""
+
+ if self.bitmap:
+ return
+
+ # create bitmap large enough to hold all data
+ h = w = maxwidth = 0
+ lines = 1
+ for glyph in self.glyph:
+ if glyph:
+ d, dst, src, im = glyph
+ h = max(h, src[3] - src[1])
+ w = w + (src[2] - src[0])
+ if w > WIDTH:
+ lines += 1
+ w = src[2] - src[0]
+ maxwidth = max(maxwidth, w)
+
+ xsize = maxwidth
+ ysize = lines * h
+
+ if xsize == 0 and ysize == 0:
+ return
+
+ self.ysize = h
+
+ # paste glyphs into bitmap
+ self.bitmap = Image.new("1", (xsize, ysize))
+ self.metrics: list[
+ tuple[tuple[int, int], tuple[int, int, int, int], tuple[int, int, int, int]]
+ | None
+ ] = [None] * 256
+ x = y = 0
+ for i in range(256):
+ glyph = self[i]
+ if glyph:
+ d, dst, src, im = glyph
+ xx = src[2] - src[0]
+ x0, y0 = x, y
+ x = x + xx
+ if x > WIDTH:
+ x, y = 0, y + h
+ x0, y0 = x, y
+ x = xx
+ s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0
+ self.bitmap.paste(im.crop(src), s)
+ self.metrics[i] = d, dst, s
+
+ def save(self, filename: str) -> None:
+ """Save font"""
+
+ self.compile()
+
+ # font data
+ if not self.bitmap:
+ msg = "No bitmap created"
+ raise ValueError(msg)
+ self.bitmap.save(os.path.splitext(filename)[0] + ".pbm", "PNG")
+
+ # font metrics
+ with open(os.path.splitext(filename)[0] + ".pil", "wb") as fp:
+ fp.write(b"PILfont\n")
+ fp.write(f";;;;;;{self.ysize};\n".encode("ascii")) # HACK!!!
+ fp.write(b"DATA\n")
+ for id in range(256):
+ m = self.metrics[id]
+ if not m:
+ puti16(fp, (0,) * 10)
+ else:
+ puti16(fp, m[0] + m[1] + m[2])
diff --git a/Lib/site-packages/PIL/FpxImagePlugin.py b/Lib/site-packages/PIL/FpxImagePlugin.py
new file mode 100644
index 0000000..75680a9
--- /dev/null
+++ b/Lib/site-packages/PIL/FpxImagePlugin.py
@@ -0,0 +1,255 @@
+#
+# THIS IS WORK IN PROGRESS
+#
+# The Python Imaging Library.
+# $Id$
+#
+# FlashPix support for PIL
+#
+# History:
+# 97-01-25 fl Created (reads uncompressed RGB images only)
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import olefile
+
+from . import Image, ImageFile
+from ._binary import i32le as i32
+
+# we map from colour field tuples to (mode, rawmode) descriptors
+MODES = {
+ # opacity
+ (0x00007FFE,): ("A", "L"),
+ # monochrome
+ (0x00010000,): ("L", "L"),
+ (0x00018000, 0x00017FFE): ("RGBA", "LA"),
+ # photo YCC
+ (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"),
+ (0x00028000, 0x00028001, 0x00028002, 0x00027FFE): ("RGBA", "YCCA;P"),
+ # standard RGB (NIFRGB)
+ (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"),
+ (0x00038000, 0x00038001, 0x00038002, 0x00037FFE): ("RGBA", "RGBA"),
+}
+
+
+#
+# --------------------------------------------------------------------
+
+
+def _accept(prefix):
+ return prefix[:8] == olefile.MAGIC
+
+
+##
+# Image plugin for the FlashPix images.
+
+
+class FpxImageFile(ImageFile.ImageFile):
+ format = "FPX"
+ format_description = "FlashPix"
+
+ def _open(self):
+ #
+ # read the OLE directory and see if this is a likely
+ # to be a FlashPix file
+
+ try:
+ self.ole = olefile.OleFileIO(self.fp)
+ except OSError as e:
+ msg = "not an FPX file; invalid OLE file"
+ raise SyntaxError(msg) from e
+
+ if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B":
+ msg = "not an FPX file; bad root CLSID"
+ raise SyntaxError(msg)
+
+ self._open_index(1)
+
+ def _open_index(self, index=1):
+ #
+ # get the Image Contents Property Set
+
+ prop = self.ole.getproperties(
+ [f"Data Object Store {index:06d}", "\005Image Contents"]
+ )
+
+ # size (highest resolution)
+
+ self._size = prop[0x1000002], prop[0x1000003]
+
+ size = max(self.size)
+ i = 1
+ while size > 64:
+ size = size / 2
+ i += 1
+ self.maxid = i - 1
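+        # the file stores a resolution pyramid: each level halves the previous
+        # one until the longest side is at most 64 pixels, and the highest
+        # numbered subimage (maxid) holds the full-size image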
+
+ # mode. instead of using a single field for this, flashpix
+ # requires you to specify the mode for each channel in each
+ # resolution subimage, and leaves it to the decoder to make
+ # sure that they all match. for now, we'll cheat and assume
+ # that this is always the case.
+
+ id = self.maxid << 16
+
+ s = prop[0x2000002 | id]
+
+ bands = i32(s, 4)
+ if bands > 4:
+ msg = "Invalid number of bands"
+ raise OSError(msg)
+
+ # note: for now, we ignore the "uncalibrated" flag
+ colors = tuple(i32(s, 8 + i * 4) & 0x7FFFFFFF for i in range(bands))
+
+ self._mode, self.rawmode = MODES[colors]
+
+ # load JPEG tables, if any
+ self.jpeg = {}
+ for i in range(256):
+ id = 0x3000001 | (i << 16)
+ if id in prop:
+ self.jpeg[i] = prop[id]
+
+ self._open_subimage(1, self.maxid)
+
+ def _open_subimage(self, index=1, subimage=0):
+ #
+ # setup tile descriptors for a given subimage
+
+ stream = [
+ f"Data Object Store {index:06d}",
+ f"Resolution {subimage:04d}",
+ "Subimage 0000 Header",
+ ]
+
+ fp = self.ole.openstream(stream)
+
+ # skip prefix
+ fp.read(28)
+
+ # header stream
+ s = fp.read(36)
+
+ size = i32(s, 4), i32(s, 8)
+ # tilecount = i32(s, 12)
+ tilesize = i32(s, 16), i32(s, 20)
+ # channels = i32(s, 24)
+ offset = i32(s, 28)
+ length = i32(s, 32)
+
+ if size != self.size:
+ msg = "subimage mismatch"
+ raise OSError(msg)
+
+ # get tile descriptors
+ fp.seek(28 + offset)
+ s = fp.read(i32(s, 12) * length)
+
+ x = y = 0
+ xsize, ysize = size
+ xtile, ytile = tilesize
+ self.tile = []
+
+ for i in range(0, len(s), length):
+ x1 = min(xsize, x + xtile)
+ y1 = min(ysize, y + ytile)
+
+ compression = i32(s, i + 8)
+
+ if compression == 0:
+ self.tile.append(
+ (
+ "raw",
+ (x, y, x1, y1),
+ i32(s, i) + 28,
+ (self.rawmode,),
+ )
+ )
+
+ elif compression == 1:
+ # FIXME: the fill decoder is not implemented
+ self.tile.append(
+ (
+ "fill",
+ (x, y, x1, y1),
+ i32(s, i) + 28,
+ (self.rawmode, s[12:16]),
+ )
+ )
+
+ elif compression == 2:
+ internal_color_conversion = s[14]
+ jpeg_tables = s[15]
+ rawmode = self.rawmode
+
+ if internal_color_conversion:
+ # The image is stored as usual (usually YCbCr).
+ if rawmode == "RGBA":
+ # For "RGBA", data is stored as YCbCrA based on
+ # negative RGB. The following trick works around
+ # this problem :
+ jpegmode, rawmode = "YCbCrK", "CMYK"
+ else:
+ jpegmode = None # let the decoder decide
+
+ else:
+ # The image is stored as defined by rawmode
+ jpegmode = rawmode
+
+ self.tile.append(
+ (
+ "jpeg",
+ (x, y, x1, y1),
+ i32(s, i) + 28,
+ (rawmode, jpegmode),
+ )
+ )
+
+ # FIXME: jpeg tables are tile dependent; the prefix
+ # data must be placed in the tile descriptor itself!
+
+ if jpeg_tables:
+ self.tile_prefix = self.jpeg[jpeg_tables]
+
+ else:
+ msg = "unknown/invalid compression"
+ raise OSError(msg)
+
+ x = x + xtile
+ if x >= xsize:
+ x, y = 0, y + ytile
+ if y >= ysize:
+ break # isn't really required
+
+ self.stream = stream
+ self._fp = self.fp
+ self.fp = None
+
+ def load(self):
+ if not self.fp:
+ self.fp = self.ole.openstream(self.stream[:2] + ["Subimage 0000 Data"])
+
+ return ImageFile.ImageFile.load(self)
+
+ def close(self):
+ self.ole.close()
+ super().close()
+
+ def __exit__(self, *args):
+ self.ole.close()
+ super().__exit__()
+
+
+#
+# --------------------------------------------------------------------
+
+
+Image.register_open(FpxImageFile.format, FpxImageFile, _accept)
+
+Image.register_extension(FpxImageFile.format, ".fpx")
diff --git a/Lib/site-packages/PIL/FtexImagePlugin.py b/Lib/site-packages/PIL/FtexImagePlugin.py
new file mode 100644
index 0000000..d5513a5
--- /dev/null
+++ b/Lib/site-packages/PIL/FtexImagePlugin.py
@@ -0,0 +1,114 @@
+"""
+A Pillow loader for .ftc and .ftu files (FTEX)
+Jerome Leclanche
+
+The contents of this file are hereby released in the public domain (CC0)
+Full text of the CC0 license:
+ https://creativecommons.org/publicdomain/zero/1.0/
+
+Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001
+
+The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a
+packed custom format called FTEX. This file format uses file extensions FTC
+and FTU.
+* FTC files are compressed textures (using standard texture compression).
+* FTU files are not compressed.
+Texture File Format
+The FTC and FTU texture files both use the same format. This
+has the following structure:
+{header}
+{format_directory}
+{data}
+Where:
+{header} = {
+ u32:magic,
+ u32:version,
+ u32:width,
+ u32:height,
+ u32:mipmap_count,
+ u32:format_count
+}
+
+* The "magic" number is "FTEX".
+* "width" and "height" are the dimensions of the texture.
+* "mipmap_count" is the number of mipmaps in the texture.
+* "format_count" is the number of texture formats (different versions of the
+same texture) in this file.
+
+{format_directory} = format_count * { u32:format, u32:where }
+
+The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB
+uncompressed textures.
+The texture data for a format starts at the position "where" in the file.
+
+Each set of texture data in the file has the following structure:
+{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } }
+* "mipmap_size" is the number of bytes in that mip level. For compressed
+textures this is the size of the texture data compressed with DXT1. For 24 bit
+uncompressed textures, this is 3 * width * height. Following this are the image
+bytes for that mipmap level.
+
+Note: All data is stored in little-Endian (Intel) byte order.
+"""
+from __future__ import annotations
+
+import struct
+from enum import IntEnum
+from io import BytesIO
+
+from . import Image, ImageFile
+
+MAGIC = b"FTEX"
+
+
+class Format(IntEnum):
+ DXT1 = 0
+ UNCOMPRESSED = 1
+
+
+class FtexImageFile(ImageFile.ImageFile):
+ format = "FTEX"
+ format_description = "Texture File Format (IW2:EOC)"
+
+ def _open(self):
+ if not _accept(self.fp.read(4)):
+ msg = "not an FTEX file"
+ raise SyntaxError(msg)
+ struct.unpack("= 8 and i32(prefix, 0) >= 20 and i32(prefix, 4) in (1, 2)
+
+
+##
+# Image plugin for the GIMP brush format.
+
+
+class GbrImageFile(ImageFile.ImageFile):
+ format = "GBR"
+ format_description = "GIMP brush file"
+
+ def _open(self):
+ header_size = i32(self.fp.read(4))
+ if header_size < 20:
+ msg = "not a GIMP brush"
+ raise SyntaxError(msg)
+ version = i32(self.fp.read(4))
+ if version not in (1, 2):
+ msg = f"Unsupported GIMP brush version: {version}"
+ raise SyntaxError(msg)
+
+ width = i32(self.fp.read(4))
+ height = i32(self.fp.read(4))
+ color_depth = i32(self.fp.read(4))
+ if width <= 0 or height <= 0:
+ msg = "not a GIMP brush"
+ raise SyntaxError(msg)
+ if color_depth not in (1, 4):
+ msg = f"Unsupported GIMP brush color depth: {color_depth}"
+ raise SyntaxError(msg)
+
+ if version == 1:
+ comment_length = header_size - 20
+ else:
+ comment_length = header_size - 28
+ magic_number = self.fp.read(4)
+ if magic_number != b"GIMP":
+ msg = "not a GIMP brush, bad magic number"
+ raise SyntaxError(msg)
+ self.info["spacing"] = i32(self.fp.read(4))
+
+ comment = self.fp.read(comment_length)[:-1]
+
+ if color_depth == 1:
+ self._mode = "L"
+ else:
+ self._mode = "RGBA"
+
+ self._size = width, height
+
+ self.info["comment"] = comment
+
+ # Image might not be small
+ Image._decompression_bomb_check(self.size)
+
+ # Data is an uncompressed block of w * h * bytes/pixel
+ self._data_size = width * height * color_depth
+
+ def load(self):
+ if not self.im:
+ self.im = Image.core.new(self.mode, self.size)
+ self.frombytes(self.fp.read(self._data_size))
+ return Image.Image.load(self)
+
+
+#
+# registry
+
+
+Image.register_open(GbrImageFile.format, GbrImageFile, _accept)
+Image.register_extension(GbrImageFile.format, ".gbr")
diff --git a/Lib/site-packages/PIL/GdImageFile.py b/Lib/site-packages/PIL/GdImageFile.py
new file mode 100644
index 0000000..d84876e
--- /dev/null
+++ b/Lib/site-packages/PIL/GdImageFile.py
@@ -0,0 +1,97 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# GD file handling
+#
+# History:
+# 1996-04-12 fl Created
+#
+# Copyright (c) 1997 by Secret Labs AB.
+# Copyright (c) 1996 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+"""
+.. note::
+ This format cannot be automatically recognized, so the
+ class is not registered for use with :py:func:`PIL.Image.open()`. To open a
+ gd file, use the :py:func:`PIL.GdImageFile.open()` function instead.
+
+.. warning::
+ THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE. This
+ implementation is provided for convenience and demonstrational
+ purposes only.
+"""
+from __future__ import annotations
+
+from . import ImageFile, ImagePalette, UnidentifiedImageError
+from ._binary import i16be as i16
+from ._binary import i32be as i32
+
+
+class GdImageFile(ImageFile.ImageFile):
+ """
+ Image plugin for the GD uncompressed format. Note that this format
+ is not supported by the standard :py:func:`PIL.Image.open()` function. To use
+ this plugin, you have to import the :py:mod:`PIL.GdImageFile` module and
+ use the :py:func:`PIL.GdImageFile.open()` function.
+ """
+
+ format = "GD"
+ format_description = "GD uncompressed images"
+
+ def _open(self):
+ # Header
+ s = self.fp.read(1037)
+
+ if i16(s) not in [65534, 65535]:
+ msg = "Not a valid GD 2.x .gd file"
+ raise SyntaxError(msg)
+
+ self._mode = "L" # FIXME: "P"
+ self._size = i16(s, 2), i16(s, 4)
+
+ true_color = s[6]
+ true_color_offset = 2 if true_color else 0
+
+ # transparency index
+ tindex = i32(s, 7 + true_color_offset)
+ if tindex < 256:
+ self.info["transparency"] = tindex
+
+ self.palette = ImagePalette.raw(
+ "XBGR", s[7 + true_color_offset + 4 : 7 + true_color_offset + 4 + 256 * 4]
+ )
+
+ self.tile = [
+ (
+ "raw",
+ (0, 0) + self.size,
+ 7 + true_color_offset + 4 + 256 * 4,
+ ("L", 0, 1),
+ )
+ ]
+
+
+def open(fp, mode="r"):
+ """
+ Load texture from a GD image file.
+
+ :param fp: GD file name, or an opened file handle.
+ :param mode: Optional mode. In this version, if the mode argument
+ is given, it must be "r".
+ :returns: An image instance.
+ :raises OSError: If the image could not be read.
+ """
+ if mode != "r":
+ msg = "bad mode"
+ raise ValueError(msg)
+
+ try:
+ return GdImageFile(fp)
+ except SyntaxError as e:
+ msg = "cannot identify this image file"
+ raise UnidentifiedImageError(msg) from e
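+
+
+# Usage sketch (the file name is hypothetical); Image.open() will not
+# recognise this format, so the module-level open() above is used directly:
+#
+#     from PIL import GdImageFile
+#
+#     with GdImageFile.open("screen.gd") as im:
+#         print(im.size, im.mode)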
diff --git a/Lib/site-packages/PIL/GifImagePlugin.py b/Lib/site-packages/PIL/GifImagePlugin.py
new file mode 100644
index 0000000..57d8707
--- /dev/null
+++ b/Lib/site-packages/PIL/GifImagePlugin.py
@@ -0,0 +1,1097 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# GIF file handling
+#
+# History:
+# 1995-09-01 fl Created
+# 1996-12-14 fl Added interlace support
+# 1996-12-30 fl Added animation support
+# 1997-01-05 fl Added write support, fixed local colour map bug
+# 1997-02-23 fl Make sure to load raster data in getdata()
+# 1997-07-05 fl Support external decoder (0.4)
+# 1998-07-09 fl Handle all modes when saving (0.5)
+# 1998-07-15 fl Renamed offset attribute to avoid name clash
+# 2001-04-16 fl Added rewind support (seek to frame 0) (0.6)
+# 2001-04-17 fl Added palette optimization (0.7)
+# 2002-06-06 fl Added transparency support for save (0.8)
+# 2004-02-24 fl Disable interlacing for small images
+#
+# Copyright (c) 1997-2004 by Secret Labs AB
+# Copyright (c) 1995-2004 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import itertools
+import math
+import os
+import subprocess
+from enum import IntEnum
+
+from . import (
+ Image,
+ ImageChops,
+ ImageFile,
+ ImageMath,
+ ImageOps,
+ ImagePalette,
+ ImageSequence,
+)
+from ._binary import i16le as i16
+from ._binary import o8
+from ._binary import o16le as o16
+
+
+class LoadingStrategy(IntEnum):
+ """.. versionadded:: 9.1.0"""
+
+ RGB_AFTER_FIRST = 0
+ RGB_AFTER_DIFFERENT_PALETTE_ONLY = 1
+ RGB_ALWAYS = 2
+
+
+#: .. versionadded:: 9.1.0
+LOADING_STRATEGY = LoadingStrategy.RGB_AFTER_FIRST
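+
+# For example, a caller that wants palette frames kept in "P" mode unless a
+# frame introduces a different palette can set:
+#
+#     from PIL import GifImagePlugin
+#
+#     GifImagePlugin.LOADING_STRATEGY = (
+#         GifImagePlugin.LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY
+#     )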
+
+# --------------------------------------------------------------------
+# Identify/read GIF files
+
+
+def _accept(prefix):
+ return prefix[:6] in [b"GIF87a", b"GIF89a"]
+
+
+##
+# Image plugin for GIF images. This plugin supports both GIF87 and
+# GIF89 images.
+
+
+class GifImageFile(ImageFile.ImageFile):
+ format = "GIF"
+ format_description = "Compuserve GIF"
+ _close_exclusive_fp_after_loading = False
+
+ global_palette = None
+
+ def data(self):
+ s = self.fp.read(1)
+ if s and s[0]:
+ return self.fp.read(s[0])
+ return None
+
+ def _is_palette_needed(self, p):
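+        # a palette is only needed if some entry differs from the grayscale
+        # identity ramp, where entry i is (i, i, i)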
+ for i in range(0, len(p), 3):
+ if not (i // 3 == p[i] == p[i + 1] == p[i + 2]):
+ return True
+ return False
+
+ def _open(self):
+ # Screen
+ s = self.fp.read(13)
+ if not _accept(s):
+ msg = "not a GIF file"
+ raise SyntaxError(msg)
+
+ self.info["version"] = s[:6]
+ self._size = i16(s, 6), i16(s, 8)
+ self.tile = []
+ flags = s[10]
+ bits = (flags & 7) + 1
+
+ if flags & 128:
+ # get global palette
+ self.info["background"] = s[11]
+ # check if palette contains colour indices
+ p = self.fp.read(3 << bits)
+ if self._is_palette_needed(p):
+ p = ImagePalette.raw("RGB", p)
+ self.global_palette = self.palette = p
+
+ self._fp = self.fp # FIXME: hack
+ self.__rewind = self.fp.tell()
+ self._n_frames = None
+ self._is_animated = None
+ self._seek(0) # get ready to read first frame
+
+ @property
+ def n_frames(self):
+ if self._n_frames is None:
+ current = self.tell()
+ try:
+ while True:
+ self._seek(self.tell() + 1, False)
+ except EOFError:
+ self._n_frames = self.tell() + 1
+ self.seek(current)
+ return self._n_frames
+
+ @property
+ def is_animated(self):
+ if self._is_animated is None:
+ if self._n_frames is not None:
+ self._is_animated = self._n_frames != 1
+ else:
+ current = self.tell()
+ if current:
+ self._is_animated = True
+ else:
+ try:
+ self._seek(1, False)
+ self._is_animated = True
+ except EOFError:
+ self._is_animated = False
+
+ self.seek(current)
+ return self._is_animated
+
+ def seek(self, frame):
+ if not self._seek_check(frame):
+ return
+ if frame < self.__frame:
+ self.im = None
+ self._seek(0)
+
+ last_frame = self.__frame
+ for f in range(self.__frame + 1, frame + 1):
+ try:
+ self._seek(f)
+ except EOFError as e:
+ self.seek(last_frame)
+ msg = "no more images in GIF file"
+ raise EOFError(msg) from e
+
+ def _seek(self, frame, update_image=True):
+ if frame == 0:
+ # rewind
+ self.__offset = 0
+ self.dispose = None
+ self.__frame = -1
+ self._fp.seek(self.__rewind)
+ self.disposal_method = 0
+ if "comment" in self.info:
+ del self.info["comment"]
+ else:
+ # ensure that the previous frame was loaded
+ if self.tile and update_image:
+ self.load()
+
+ if frame != self.__frame + 1:
+ msg = f"cannot seek to frame {frame}"
+ raise ValueError(msg)
+
+ self.fp = self._fp
+ if self.__offset:
+ # backup to last frame
+ self.fp.seek(self.__offset)
+ while self.data():
+ pass
+ self.__offset = 0
+
+ s = self.fp.read(1)
+ if not s or s == b";":
+ msg = "no more images in GIF file"
+ raise EOFError(msg)
+
+ palette = None
+
+ info = {}
+ frame_transparency = None
+ interlace = None
+ frame_dispose_extent = None
+ while True:
+ if not s:
+ s = self.fp.read(1)
+ if not s or s == b";":
+ break
+
+ elif s == b"!":
+ #
+ # extensions
+ #
+ s = self.fp.read(1)
+ block = self.data()
+ if s[0] == 249:
+ #
+ # graphic control extension
+ #
+ flags = block[0]
+ if flags & 1:
+ frame_transparency = block[3]
+ info["duration"] = i16(block, 1) * 10
+
+ # disposal method - find the value of bits 4 - 6
+ dispose_bits = 0b00011100 & flags
+ dispose_bits = dispose_bits >> 2
+ if dispose_bits:
+ # only set the dispose if it is not
+ # unspecified. I'm not sure if this is
+ # correct, but it seems to prevent the last
+ # frame from looking odd for some animations
+ self.disposal_method = dispose_bits
+ elif s[0] == 254:
+ #
+ # comment extension
+ #
+ comment = b""
+
+ # Read this comment block
+ while block:
+ comment += block
+ block = self.data()
+
+ if "comment" in info:
+ # If multiple comment blocks in frame, separate with \n
+ info["comment"] += b"\n" + comment
+ else:
+ info["comment"] = comment
+ s = None
+ continue
+ elif s[0] == 255 and frame == 0:
+ #
+ # application extension
+ #
+ info["extension"] = block, self.fp.tell()
+ if block[:11] == b"NETSCAPE2.0":
+ block = self.data()
+ if len(block) >= 3 and block[0] == 1:
+ self.info["loop"] = i16(block, 1)
+ while self.data():
+ pass
+
+ elif s == b",":
+ #
+ # local image
+ #
+ s = self.fp.read(9)
+
+ # extent
+ x0, y0 = i16(s, 0), i16(s, 2)
+ x1, y1 = x0 + i16(s, 4), y0 + i16(s, 6)
+ if (x1 > self.size[0] or y1 > self.size[1]) and update_image:
+ self._size = max(x1, self.size[0]), max(y1, self.size[1])
+ Image._decompression_bomb_check(self._size)
+ frame_dispose_extent = x0, y0, x1, y1
+ flags = s[8]
+
+ interlace = (flags & 64) != 0
+
+ if flags & 128:
+ bits = (flags & 7) + 1
+ p = self.fp.read(3 << bits)
+ if self._is_palette_needed(p):
+ palette = ImagePalette.raw("RGB", p)
+ else:
+ palette = False
+
+ # image data
+ bits = self.fp.read(1)[0]
+ self.__offset = self.fp.tell()
+ break
+ s = None
+
+ if interlace is None:
+ msg = "image not found in GIF frame"
+ raise EOFError(msg)
+
+ self.__frame = frame
+ if not update_image:
+ return
+
+ self.tile = []
+
+ if self.dispose:
+ self.im.paste(self.dispose, self.dispose_extent)
+
+ self._frame_palette = palette if palette is not None else self.global_palette
+ self._frame_transparency = frame_transparency
+ if frame == 0:
+ if self._frame_palette:
+ if LOADING_STRATEGY == LoadingStrategy.RGB_ALWAYS:
+ self._mode = "RGBA" if frame_transparency is not None else "RGB"
+ else:
+ self._mode = "P"
+ else:
+ self._mode = "L"
+
+ if not palette and self.global_palette:
+ from copy import copy
+
+ palette = copy(self.global_palette)
+ self.palette = palette
+ else:
+ if self.mode == "P":
+ if (
+ LOADING_STRATEGY != LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY
+ or palette
+ ):
+ self.pyaccess = None
+ if "transparency" in self.info:
+ self.im.putpalettealpha(self.info["transparency"], 0)
+ self.im = self.im.convert("RGBA", Image.Dither.FLOYDSTEINBERG)
+ self._mode = "RGBA"
+ del self.info["transparency"]
+ else:
+ self._mode = "RGB"
+ self.im = self.im.convert("RGB", Image.Dither.FLOYDSTEINBERG)
+
+ def _rgb(color):
+ if self._frame_palette:
+ if color * 3 + 3 > len(self._frame_palette.palette):
+ color = 0
+ color = tuple(self._frame_palette.palette[color * 3 : color * 3 + 3])
+ else:
+ color = (color, color, color)
+ return color
+
+ self.dispose_extent = frame_dispose_extent
+ try:
+ if self.disposal_method < 2:
+ # do not dispose or none specified
+ self.dispose = None
+ elif self.disposal_method == 2:
+ # replace with background colour
+
+ # only dispose the extent in this frame
+ x0, y0, x1, y1 = self.dispose_extent
+ dispose_size = (x1 - x0, y1 - y0)
+
+ Image._decompression_bomb_check(dispose_size)
+
+ # by convention, attempt to use transparency first
+ dispose_mode = "P"
+ color = self.info.get("transparency", frame_transparency)
+ if color is not None:
+ if self.mode in ("RGB", "RGBA"):
+ dispose_mode = "RGBA"
+ color = _rgb(color) + (0,)
+ else:
+ color = self.info.get("background", 0)
+ if self.mode in ("RGB", "RGBA"):
+ dispose_mode = "RGB"
+ color = _rgb(color)
+ self.dispose = Image.core.fill(dispose_mode, dispose_size, color)
+ else:
+ # replace with previous contents
+ if self.im is not None:
+ # only dispose the extent in this frame
+ self.dispose = self._crop(self.im, self.dispose_extent)
+ elif frame_transparency is not None:
+ x0, y0, x1, y1 = self.dispose_extent
+ dispose_size = (x1 - x0, y1 - y0)
+
+ Image._decompression_bomb_check(dispose_size)
+ dispose_mode = "P"
+ color = frame_transparency
+ if self.mode in ("RGB", "RGBA"):
+ dispose_mode = "RGBA"
+ color = _rgb(frame_transparency) + (0,)
+ self.dispose = Image.core.fill(dispose_mode, dispose_size, color)
+ except AttributeError:
+ pass
+
+ if interlace is not None:
+ transparency = -1
+ if frame_transparency is not None:
+ if frame == 0:
+ if LOADING_STRATEGY != LoadingStrategy.RGB_ALWAYS:
+ self.info["transparency"] = frame_transparency
+ elif self.mode not in ("RGB", "RGBA"):
+ transparency = frame_transparency
+ self.tile = [
+ (
+ "gif",
+ (x0, y0, x1, y1),
+ self.__offset,
+ (bits, interlace, transparency),
+ )
+ ]
+
+ if info.get("comment"):
+ self.info["comment"] = info["comment"]
+ for k in ["duration", "extension"]:
+ if k in info:
+ self.info[k] = info[k]
+ elif k in self.info:
+ del self.info[k]
+
+ def load_prepare(self):
+ temp_mode = "P" if self._frame_palette else "L"
+ self._prev_im = None
+ if self.__frame == 0:
+ if self._frame_transparency is not None:
+ self.im = Image.core.fill(
+ temp_mode, self.size, self._frame_transparency
+ )
+ elif self.mode in ("RGB", "RGBA"):
+ self._prev_im = self.im
+ if self._frame_palette:
+ self.im = Image.core.fill("P", self.size, self._frame_transparency or 0)
+ self.im.putpalette(*self._frame_palette.getdata())
+ else:
+ self.im = None
+ self._mode = temp_mode
+ self._frame_palette = None
+
+ super().load_prepare()
+
+ def load_end(self):
+ if self.__frame == 0:
+ if self.mode == "P" and LOADING_STRATEGY == LoadingStrategy.RGB_ALWAYS:
+ if self._frame_transparency is not None:
+ self.im.putpalettealpha(self._frame_transparency, 0)
+ self._mode = "RGBA"
+ else:
+ self._mode = "RGB"
+ self.im = self.im.convert(self.mode, Image.Dither.FLOYDSTEINBERG)
+ return
+ if not self._prev_im:
+ return
+ if self._frame_transparency is not None:
+ self.im.putpalettealpha(self._frame_transparency, 0)
+ frame_im = self.im.convert("RGBA")
+ else:
+ frame_im = self.im.convert("RGB")
+ frame_im = self._crop(frame_im, self.dispose_extent)
+
+ self.im = self._prev_im
+ self._mode = self.im.mode
+ if frame_im.mode == "RGBA":
+ self.im.paste(frame_im, self.dispose_extent, frame_im)
+ else:
+ self.im.paste(frame_im, self.dispose_extent)
+
+ def tell(self):
+ return self.__frame
+
+
+# --------------------------------------------------------------------
+# Write GIF files
+
+
+RAWMODE = {"1": "L", "L": "L", "P": "P"}
+
+
+def _normalize_mode(im):
+ """
+ Takes an image (or frame), returns an image in a mode that is appropriate
+ for saving in a Gif.
+
+ It may return the original image, or it may return an image converted to
+ palette or 'L' mode.
+
+ :param im: Image object
+ :returns: Image object
+ """
+ if im.mode in RAWMODE:
+ im.load()
+ return im
+ if Image.getmodebase(im.mode) == "RGB":
+ im = im.convert("P", palette=Image.Palette.ADAPTIVE)
+ if im.palette.mode == "RGBA":
+ for rgba in im.palette.colors:
+ if rgba[3] == 0:
+ im.info["transparency"] = im.palette.colors[rgba]
+ break
+ return im
+ return im.convert("L")
+
+
+def _normalize_palette(im, palette, info):
+ """
+ Normalizes the palette for image.
+ - Sets the palette to the incoming palette, if provided.
+ - Ensures that there's a palette for L mode images
+ - Optimizes the palette if necessary/desired.
+
+ :param im: Image object
+ :param palette: bytes object containing the source palette, or ....
+ :param info: encoderinfo
+ :returns: Image object
+ """
+ source_palette = None
+ if palette:
+ # a bytes palette
+ if isinstance(palette, (bytes, bytearray, list)):
+ source_palette = bytearray(palette[:768])
+ if isinstance(palette, ImagePalette.ImagePalette):
+ source_palette = bytearray(palette.palette)
+
+ if im.mode == "P":
+ if not source_palette:
+ source_palette = im.im.getpalette("RGB")[:768]
+ else: # L-mode
+ if not source_palette:
+ source_palette = bytearray(i // 3 for i in range(768))
+ im.palette = ImagePalette.ImagePalette("RGB", palette=source_palette)
+
+ if palette:
+ used_palette_colors = []
+ for i in range(0, len(source_palette), 3):
+ source_color = tuple(source_palette[i : i + 3])
+ index = im.palette.colors.get(source_color)
+ if index in used_palette_colors:
+ index = None
+ used_palette_colors.append(index)
+ for i, index in enumerate(used_palette_colors):
+ if index is None:
+ for j in range(len(used_palette_colors)):
+ if j not in used_palette_colors:
+ used_palette_colors[i] = j
+ break
+ im = im.remap_palette(used_palette_colors)
+ else:
+ used_palette_colors = _get_optimize(im, info)
+ if used_palette_colors is not None:
+ im = im.remap_palette(used_palette_colors, source_palette)
+ if "transparency" in info:
+ try:
+ info["transparency"] = used_palette_colors.index(
+ info["transparency"]
+ )
+ except ValueError:
+ del info["transparency"]
+ return im
+
+ im.palette.palette = source_palette
+ return im
+
+
+def _write_single_frame(im, fp, palette):
+ im_out = _normalize_mode(im)
+ for k, v in im_out.info.items():
+ im.encoderinfo.setdefault(k, v)
+ im_out = _normalize_palette(im_out, palette, im.encoderinfo)
+
+ for s in _get_global_header(im_out, im.encoderinfo):
+ fp.write(s)
+
+ # local image header
+ flags = 0
+ if get_interlace(im):
+ flags = flags | 64
+ _write_local_header(fp, im, (0, 0), flags)
+
+ im_out.encoderconfig = (8, get_interlace(im))
+ ImageFile._save(im_out, fp, [("gif", (0, 0) + im.size, 0, RAWMODE[im_out.mode])])
+
+ fp.write(b"\0") # end of image data
+
+
+def _getbbox(base_im, im_frame):
+ if _get_palette_bytes(im_frame) != _get_palette_bytes(base_im):
+ im_frame = im_frame.convert("RGBA")
+ base_im = base_im.convert("RGBA")
+ delta = ImageChops.subtract_modulo(im_frame, base_im)
+ return delta, delta.getbbox(alpha_only=False)
+
+
+def _write_multiple_frames(im, fp, palette):
+ duration = im.encoderinfo.get("duration")
+ disposal = im.encoderinfo.get("disposal", im.info.get("disposal"))
+
+ im_frames = []
+ previous_im = None
+ frame_count = 0
+ background_im = None
+ for imSequence in itertools.chain([im], im.encoderinfo.get("append_images", [])):
+ for im_frame in ImageSequence.Iterator(imSequence):
+ # a copy is required here since seek can still mutate the image
+ im_frame = _normalize_mode(im_frame.copy())
+ if frame_count == 0:
+ for k, v in im_frame.info.items():
+ if k == "transparency":
+ continue
+ im.encoderinfo.setdefault(k, v)
+
+ encoderinfo = im.encoderinfo.copy()
+ if "transparency" in im_frame.info:
+ encoderinfo.setdefault("transparency", im_frame.info["transparency"])
+ im_frame = _normalize_palette(im_frame, palette, encoderinfo)
+ if isinstance(duration, (list, tuple)):
+ encoderinfo["duration"] = duration[frame_count]
+ elif duration is None and "duration" in im_frame.info:
+ encoderinfo["duration"] = im_frame.info["duration"]
+ if isinstance(disposal, (list, tuple)):
+ encoderinfo["disposal"] = disposal[frame_count]
+ frame_count += 1
+
+ diff_frame = None
+ if im_frames:
+ # delta frame
+ delta, bbox = _getbbox(previous_im, im_frame)
+ if not bbox:
+ # This frame is identical to the previous frame
+ if encoderinfo.get("duration"):
+ im_frames[-1]["encoderinfo"]["duration"] += encoderinfo[
+ "duration"
+ ]
+ continue
+ if encoderinfo.get("disposal") == 2:
+ if background_im is None:
+ color = im.encoderinfo.get(
+ "transparency", im.info.get("transparency", (0, 0, 0))
+ )
+ background = _get_background(im_frame, color)
+ background_im = Image.new("P", im_frame.size, background)
+ background_im.putpalette(im_frames[0]["im"].palette)
+ delta, bbox = _getbbox(background_im, im_frame)
+ if encoderinfo.get("optimize") and im_frame.mode != "1":
+ if "transparency" not in encoderinfo:
+ try:
+ encoderinfo[
+ "transparency"
+ ] = im_frame.palette._new_color_index(im_frame)
+ except ValueError:
+ pass
+ if "transparency" in encoderinfo:
+ # When the delta is zero, fill the image with transparency
+ diff_frame = im_frame.copy()
+ fill = Image.new(
+ "P", diff_frame.size, encoderinfo["transparency"]
+ )
+ if delta.mode == "RGBA":
+ r, g, b, a = delta.split()
+ mask = ImageMath.eval(
+ "convert(max(max(max(r, g), b), a) * 255, '1')",
+ r=r,
+ g=g,
+ b=b,
+ a=a,
+ )
+ else:
+ if delta.mode == "P":
+ # Convert to L without considering palette
+ delta_l = Image.new("L", delta.size)
+ delta_l.putdata(delta.getdata())
+ delta = delta_l
+ mask = ImageMath.eval("convert(im * 255, '1')", im=delta)
+ diff_frame.paste(fill, mask=ImageOps.invert(mask))
+ else:
+ bbox = None
+ previous_im = im_frame
+ im_frames.append(
+ {"im": diff_frame or im_frame, "bbox": bbox, "encoderinfo": encoderinfo}
+ )
+
+ if len(im_frames) == 1:
+ if "duration" in im.encoderinfo:
+ # Since multiple frames will not be written, use the combined duration
+ im.encoderinfo["duration"] = im_frames[0]["encoderinfo"]["duration"]
+ return
+
+ for frame_data in im_frames:
+ im_frame = frame_data["im"]
+ if not frame_data["bbox"]:
+ # global header
+ for s in _get_global_header(im_frame, frame_data["encoderinfo"]):
+ fp.write(s)
+ offset = (0, 0)
+ else:
+ # compress difference
+ if not palette:
+ frame_data["encoderinfo"]["include_color_table"] = True
+
+ im_frame = im_frame.crop(frame_data["bbox"])
+ offset = frame_data["bbox"][:2]
+ _write_frame_data(fp, im_frame, offset, frame_data["encoderinfo"])
+ return True
+
+
+def _save_all(im, fp, filename):
+ _save(im, fp, filename, save_all=True)
+
+
+def _save(im, fp, filename, save_all=False):
+ # header
+ if "palette" in im.encoderinfo or "palette" in im.info:
+ palette = im.encoderinfo.get("palette", im.info.get("palette"))
+ else:
+ palette = None
+ im.encoderinfo.setdefault("optimize", True)
+
+ if not save_all or not _write_multiple_frames(im, fp, palette):
+ _write_single_frame(im, fp, palette)
+
+ fp.write(b";") # end of file
+
+ if hasattr(fp, "flush"):
+ fp.flush()
+
+
+def get_interlace(im):
+ interlace = im.encoderinfo.get("interlace", 1)
+
+ # workaround for @PIL153
+ if min(im.size) < 16:
+ interlace = 0
+
+ return interlace
+
+
+def _write_local_header(fp, im, offset, flags):
+ try:
+ transparency = im.encoderinfo["transparency"]
+ except KeyError:
+ transparency = None
+
+ if "duration" in im.encoderinfo:
+ duration = int(im.encoderinfo["duration"] / 10)
+ else:
+ duration = 0
+
+ disposal = int(im.encoderinfo.get("disposal", 0))
+
+ if transparency is not None or duration != 0 or disposal:
+ packed_flag = 1 if transparency is not None else 0
+ packed_flag |= disposal << 2
+
+ fp.write(
+ b"!"
+ + o8(249) # extension intro
+ + o8(4) # length
+ + o8(packed_flag) # packed fields
+ + o16(duration) # duration
+ + o8(transparency or 0) # transparency index
+ + o8(0)
+ )
+
+ include_color_table = im.encoderinfo.get("include_color_table")
+ if include_color_table:
+ palette_bytes = _get_palette_bytes(im)
+ color_table_size = _get_color_table_size(palette_bytes)
+ if color_table_size:
+ flags = flags | 128 # local color table flag
+ flags = flags | color_table_size
+
+ fp.write(
+ b","
+ + o16(offset[0]) # offset
+ + o16(offset[1])
+ + o16(im.size[0]) # size
+ + o16(im.size[1])
+ + o8(flags) # flags
+ )
+ if include_color_table and color_table_size:
+ fp.write(_get_header_palette(palette_bytes))
+ fp.write(o8(8)) # bits
+
+
+def _save_netpbm(im, fp, filename):
+ # Unused by default.
+ # To use, uncomment the register_save call at the end of the file.
+ #
+ # If you need real GIF compression and/or RGB quantization, you
+ # can use the external NETPBM/PBMPLUS utilities. See comments
+ # below for information on how to enable this.
+ tempfile = im._dump()
+
+ try:
+ with open(filename, "wb") as f:
+ if im.mode != "RGB":
+ subprocess.check_call(
+ ["ppmtogif", tempfile], stdout=f, stderr=subprocess.DEVNULL
+ )
+ else:
+ # Pipe ppmquant output into ppmtogif
+ # "ppmquant 256 %s | ppmtogif > %s" % (tempfile, filename)
+ quant_cmd = ["ppmquant", "256", tempfile]
+ togif_cmd = ["ppmtogif"]
+ quant_proc = subprocess.Popen(
+ quant_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
+ )
+ togif_proc = subprocess.Popen(
+ togif_cmd,
+ stdin=quant_proc.stdout,
+ stdout=f,
+ stderr=subprocess.DEVNULL,
+ )
+
+ # Allow ppmquant to receive SIGPIPE if ppmtogif exits
+ quant_proc.stdout.close()
+
+ retcode = quant_proc.wait()
+ if retcode:
+ raise subprocess.CalledProcessError(retcode, quant_cmd)
+
+ retcode = togif_proc.wait()
+ if retcode:
+ raise subprocess.CalledProcessError(retcode, togif_cmd)
+ finally:
+ try:
+ os.unlink(tempfile)
+ except OSError:
+ pass
+
+
+# Force optimization so that we can test performance against
+# cases where it took lots of memory and time previously.
+_FORCE_OPTIMIZE = False
+
+
+def _get_optimize(im, info):
+ """
+ Palette optimization is a potentially expensive operation.
+
+ This function determines if the palette should be optimized using
+ some heuristics, then returns the list of palette entries in use.
+
+ :param im: Image object
+ :param info: encoderinfo
+ :returns: list of indexes of palette entries in use, or None
+ """
+ if im.mode in ("P", "L") and info and info.get("optimize"):
+ # Potentially expensive operation.
+
+ # The palette saves 3 bytes per color not used, but palette
+ # lengths are restricted to 3*(2**N) bytes. Max saving would
+ # be 768 -> 6 bytes if we went all the way down to 2 colors.
+ # * If we're over 128 colors, we can't save any space.
+ # * If there aren't any holes, it's not worth collapsing.
+ # * If we have a 'large' image, the palette is in the noise.
+
+ # create the new palette if not every color is used
+ optimise = _FORCE_OPTIMIZE or im.mode == "L"
+ if optimise or im.width * im.height < 512 * 512:
+ # check which colors are used
+ used_palette_colors = []
+ for i, count in enumerate(im.histogram()):
+ if count:
+ used_palette_colors.append(i)
+
+ if optimise or max(used_palette_colors) >= len(used_palette_colors):
+ return used_palette_colors
+
+ num_palette_colors = len(im.palette.palette) // Image.getmodebands(
+ im.palette.mode
+ )
+ current_palette_size = 1 << (num_palette_colors - 1).bit_length()
+ if (
+ # check that the palette would become smaller when saved
+ len(used_palette_colors) <= current_palette_size // 2
+ # check that the palette is not already the smallest possible size
+ and current_palette_size > 2
+ ):
+ return used_palette_colors
+
+
+def _get_color_table_size(palette_bytes):
+ # calculate the palette size for the header
+ if not palette_bytes:
+ return 0
+ elif len(palette_bytes) < 9:
+ return 1
+ else:
+ return math.ceil(math.log(len(palette_bytes) // 3, 2)) - 1
+
+
+def _get_header_palette(palette_bytes):
+ """
+ Returns the palette, null padded to the next power of 2 (*3) bytes
+ suitable for direct inclusion in the GIF header
+
+ :param palette_bytes: Unpadded palette bytes, in RGBRGB form
+ :returns: Null padded palette
+ """
+ color_table_size = _get_color_table_size(palette_bytes)
+
+ # add the missing amount of bytes
+ # the palette has to be 2< 0:
+ palette_bytes += o8(0) * 3 * actual_target_size_diff
+ return palette_bytes
+
+
+def _get_palette_bytes(im):
+ """
+ Gets the palette for inclusion in the gif header
+
+ :param im: Image object
+ :returns: Bytes, len<=768 suitable for inclusion in gif header
+ """
+ return im.palette.palette if im.palette else b""
+
+
+def _get_background(im, info_background):
+ background = 0
+ if info_background:
+ if isinstance(info_background, tuple):
+ # WebPImagePlugin stores an RGBA value in info["background"]
+ # So it must be converted to the same format as GifImagePlugin's
+ # info["background"] - a global color table index
+ try:
+ background = im.palette.getcolor(info_background, im)
+ except ValueError as e:
+ if str(e) not in (
+ # If all 256 colors are in use,
+ # then there is no need for the background color
+ "cannot allocate more than 256 colors",
+ # Ignore non-opaque WebP background
+ "cannot add non-opaque RGBA color to RGB palette",
+ ):
+ raise
+ else:
+ background = info_background
+ return background
+
+
+def _get_global_header(im, info):
+ """Return a list of strings representing a GIF header"""
+
+ # Header Block
+ # https://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp
+
+ version = b"87a"
+ if im.info.get("version") == b"89a" or (
+ info
+ and (
+ "transparency" in info
+ or info.get("loop") is not None
+ or info.get("duration")
+ or info.get("comment")
+ )
+ ):
+ version = b"89a"
+
+ background = _get_background(im, info.get("background"))
+
+ palette_bytes = _get_palette_bytes(im)
+ color_table_size = _get_color_table_size(palette_bytes)
+
+ header = [
+ b"GIF" # signature
+ + version # version
+ + o16(im.size[0]) # canvas width
+ + o16(im.size[1]), # canvas height
+ # Logical Screen Descriptor
+ # size of global color table + global color table flag
+ o8(color_table_size + 128), # packed fields
+ # background + reserved/aspect
+ o8(background) + o8(0),
+ # Global Color Table
+ _get_header_palette(palette_bytes),
+ ]
+ if info.get("loop") is not None:
+ header.append(
+ b"!"
+ + o8(255) # extension intro
+ + o8(11)
+ + b"NETSCAPE2.0"
+ + o8(3)
+ + o8(1)
+ + o16(info["loop"]) # number of loops
+ + o8(0)
+ )
+ if info.get("comment"):
+ comment_block = b"!" + o8(254) # extension intro
+
+ comment = info["comment"]
+ if isinstance(comment, str):
+ comment = comment.encode()
+ for i in range(0, len(comment), 255):
+ subblock = comment[i : i + 255]
+ comment_block += o8(len(subblock)) + subblock
+
+ comment_block += o8(0)
+ header.append(comment_block)
+ return header
+
+
+def _write_frame_data(fp, im_frame, offset, params):
+ try:
+ im_frame.encoderinfo = params
+
+ # local image header
+ _write_local_header(fp, im_frame, offset, 0)
+
+ ImageFile._save(
+ im_frame, fp, [("gif", (0, 0) + im_frame.size, 0, RAWMODE[im_frame.mode])]
+ )
+
+ fp.write(b"\0") # end of image data
+ finally:
+ del im_frame.encoderinfo
+
+
+# --------------------------------------------------------------------
+# Legacy GIF utilities
+
+
+def getheader(im, palette=None, info=None):
+ """
+ Legacy Method to get Gif data from image.
+
+ Warning:: May modify image data.
+
+ :param im: Image object
+ :param palette: bytes object containing the source palette, or ....
+ :param info: encoderinfo
+ :returns: tuple of(list of header items, optimized palette)
+
+ """
+ used_palette_colors = _get_optimize(im, info)
+
+ if info is None:
+ info = {}
+
+ if "background" not in info and "background" in im.info:
+ info["background"] = im.info["background"]
+
+ im_mod = _normalize_palette(im, palette, info)
+ im.palette = im_mod.palette
+ im.im = im_mod.im
+ header = _get_global_header(im, info)
+
+ return header, used_palette_colors
+
+
+def getdata(im, offset=(0, 0), **params):
+ """
+ Legacy Method
+
+ Return a list of strings representing this image.
+ The first string is a local image header, the rest contains
+ encoded image data.
+
+ To specify duration, add the time in milliseconds,
+ e.g. ``getdata(im_frame, duration=1000)``
+
+ :param im: Image object
+ :param offset: Tuple of (x, y) pixels. Defaults to (0, 0)
+ :param \\**params: e.g. duration or other encoder info parameters
+ :returns: List of bytes containing GIF encoded frame data
+
+ """
+
+ class Collector:
+ data = []
+
+ def write(self, data):
+ self.data.append(data)
+
+ im.load() # make sure raster data is available
+
+ fp = Collector()
+
+ _write_frame_data(fp, im, offset, params)
+
+ return fp.data
+
+
+# --------------------------------------------------------------------
+# Registry
+
+Image.register_open(GifImageFile.format, GifImageFile, _accept)
+Image.register_save(GifImageFile.format, _save)
+Image.register_save_all(GifImageFile.format, _save_all)
+Image.register_extension(GifImageFile.format, ".gif")
+Image.register_mime(GifImageFile.format, "image/gif")
+
+#
+# Uncomment the following line if you wish to use NETPBM/PBMPLUS
+# instead of the built-in "uncompressed" GIF encoder
+
+# Image.register_save(GifImageFile.format, _save_netpbm)
diff --git a/Lib/site-packages/PIL/GimpGradientFile.py b/Lib/site-packages/PIL/GimpGradientFile.py
new file mode 100644
index 0000000..2d8c78e
--- /dev/null
+++ b/Lib/site-packages/PIL/GimpGradientFile.py
@@ -0,0 +1,137 @@
+#
+# Python Imaging Library
+# $Id$
+#
+# stuff to read (and render) GIMP gradient files
+#
+# History:
+# 97-08-23 fl Created
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
+#
+
+"""
+Stuff to translate curve segments to palette values (derived from
+the corresponding code in GIMP, written by Federico Mena Quintero.
+See the GIMP distribution for more information.)
+"""
+from __future__ import annotations
+
+from math import log, pi, sin, sqrt
+
+from ._binary import o8
+
+EPSILON = 1e-10
+"""""" # Enable auto-doc for data member
+
+
+def linear(middle, pos):
+ if pos <= middle:
+ if middle < EPSILON:
+ return 0.0
+ else:
+ return 0.5 * pos / middle
+ else:
+ pos = pos - middle
+ middle = 1.0 - middle
+ if middle < EPSILON:
+ return 1.0
+ else:
+ return 0.5 + 0.5 * pos / middle
+
+
+def curved(middle, pos):
+ return pos ** (log(0.5) / log(max(middle, EPSILON)))
+
+
+def sine(middle, pos):
+ return (sin((-pi / 2.0) + pi * linear(middle, pos)) + 1.0) / 2.0
+
+
+def sphere_increasing(middle, pos):
+ return sqrt(1.0 - (linear(middle, pos) - 1.0) ** 2)
+
+
+def sphere_decreasing(middle, pos):
+ return 1.0 - sqrt(1.0 - linear(middle, pos) ** 2)
+
+
+SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing]
+"""""" # Enable auto-doc for data member
+
+
+class GradientFile:
+ gradient = None
+
+ def getpalette(self, entries=256):
+ palette = []
+
+ ix = 0
+ x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix]
+
+ for i in range(entries):
+ x = i / (entries - 1)
+
+ while x1 < x:
+ ix += 1
+ x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix]
+
+ w = x1 - x0
+
+ if w < EPSILON:
+ scale = segment(0.5, 0.5)
+ else:
+ scale = segment((xm - x0) / w, (x - x0) / w)
+
+ # expand to RGBA
+ r = o8(int(255 * ((rgb1[0] - rgb0[0]) * scale + rgb0[0]) + 0.5))
+ g = o8(int(255 * ((rgb1[1] - rgb0[1]) * scale + rgb0[1]) + 0.5))
+ b = o8(int(255 * ((rgb1[2] - rgb0[2]) * scale + rgb0[2]) + 0.5))
+ a = o8(int(255 * ((rgb1[3] - rgb0[3]) * scale + rgb0[3]) + 0.5))
+
+ # add to palette
+ palette.append(r + g + b + a)
+
+ return b"".join(palette), "RGBA"
+
+
+class GimpGradientFile(GradientFile):
+ """File handler for GIMP's gradient format."""
+
+ def __init__(self, fp):
+ if fp.readline()[:13] != b"GIMP Gradient":
+ msg = "not a GIMP gradient file"
+ raise SyntaxError(msg)
+
+ line = fp.readline()
+
+ # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do
+ if line.startswith(b"Name: "):
+ line = fp.readline().strip()
+
+ count = int(line)
+
+ gradient = []
+
+ for i in range(count):
+ s = fp.readline().split()
+ w = [float(x) for x in s[:11]]
+
+ x0, x1 = w[0], w[2]
+ xm = w[1]
+ rgb0 = w[3:7]
+ rgb1 = w[7:11]
+
+ segment = SEGMENTS[int(s[11])]
+ cspace = int(s[12])
+
+ if cspace != 0:
+ msg = "cannot handle HSV colour space"
+ raise OSError(msg)
+
+ gradient.append((x0, x1, xm, rgb0, rgb1, segment))
+
+ self.gradient = gradient
diff --git a/Lib/site-packages/PIL/GimpPaletteFile.py b/Lib/site-packages/PIL/GimpPaletteFile.py
new file mode 100644
index 0000000..a3109eb
--- /dev/null
+++ b/Lib/site-packages/PIL/GimpPaletteFile.py
@@ -0,0 +1,57 @@
+#
+# Python Imaging Library
+# $Id$
+#
+# stuff to read GIMP palette files
+#
+# History:
+# 1997-08-23 fl Created
+# 2004-09-07 fl Support GIMP 2.0 palette files.
+#
+# Copyright (c) Secret Labs AB 1997-2004. All rights reserved.
+# Copyright (c) Fredrik Lundh 1997-2004.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import re
+
+from ._binary import o8
+
+
+class GimpPaletteFile:
+ """File handler for GIMP's palette format."""
+
+ rawmode = "RGB"
+
+ def __init__(self, fp):
+ self.palette = [o8(i) * 3 for i in range(256)]
+
+ if fp.readline()[:12] != b"GIMP Palette":
+ msg = "not a GIMP palette file"
+ raise SyntaxError(msg)
+
+ for i in range(256):
+ s = fp.readline()
+ if not s:
+ break
+
+ # skip fields and comment lines
+ if re.match(rb"\w+:|#", s):
+ continue
+ if len(s) > 100:
+ msg = "bad palette file"
+ raise SyntaxError(msg)
+
+ v = tuple(map(int, s.split()[:3]))
+ if len(v) != 3:
+ msg = "bad palette entry"
+ raise ValueError(msg)
+
+ self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2])
+
+ self.palette = b"".join(self.palette)
+
+ def getpalette(self):
+ return self.palette, self.rawmode
diff --git a/Lib/site-packages/PIL/GribStubImagePlugin.py b/Lib/site-packages/PIL/GribStubImagePlugin.py
new file mode 100644
index 0000000..f810680
--- /dev/null
+++ b/Lib/site-packages/PIL/GribStubImagePlugin.py
@@ -0,0 +1,74 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# GRIB stub adapter
+#
+# Copyright (c) 1996-2003 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from . import Image, ImageFile
+
+_handler = None
+
+
+def register_handler(handler):
+ """
+ Install application-specific GRIB image handler.
+
+ :param handler: Handler object.
+ """
+ global _handler
+ _handler = handler
+
+
+# --------------------------------------------------------------------
+# Image adapter
+
+
+def _accept(prefix):
+ return prefix[:4] == b"GRIB" and prefix[7] == 1
+
+
+class GribStubImageFile(ImageFile.StubImageFile):
+ format = "GRIB"
+ format_description = "GRIB"
+
+ def _open(self):
+ offset = self.fp.tell()
+
+ if not _accept(self.fp.read(8)):
+ msg = "Not a GRIB file"
+ raise SyntaxError(msg)
+
+ self.fp.seek(offset)
+
+ # make something up
+ self._mode = "F"
+ self._size = 1, 1
+
+ loader = self._load()
+ if loader:
+ loader.open(self)
+
+ def _load(self):
+ return _handler
+
+
+def _save(im, fp, filename):
+ if _handler is None or not hasattr(_handler, "save"):
+ msg = "GRIB save handler not installed"
+ raise OSError(msg)
+ _handler.save(im, fp, filename)
+
+
+# --------------------------------------------------------------------
+# Registry
+
+Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept)
+Image.register_save(GribStubImageFile.format, _save)
+
+Image.register_extension(GribStubImageFile.format, ".grib")
diff --git a/Lib/site-packages/PIL/Hdf5StubImagePlugin.py b/Lib/site-packages/PIL/Hdf5StubImagePlugin.py
new file mode 100644
index 0000000..65409e2
--- /dev/null
+++ b/Lib/site-packages/PIL/Hdf5StubImagePlugin.py
@@ -0,0 +1,74 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# HDF5 stub adapter
+#
+# Copyright (c) 2000-2003 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from . import Image, ImageFile
+
+_handler = None
+
+
+def register_handler(handler):
+ """
+ Install application-specific HDF5 image handler.
+
+ :param handler: Handler object.
+ """
+ global _handler
+ _handler = handler
+
+
+# --------------------------------------------------------------------
+# Image adapter
+
+
+def _accept(prefix):
+ return prefix[:8] == b"\x89HDF\r\n\x1a\n"
+
+
+class HDF5StubImageFile(ImageFile.StubImageFile):
+ format = "HDF5"
+ format_description = "HDF5"
+
+ def _open(self):
+ offset = self.fp.tell()
+
+ if not _accept(self.fp.read(8)):
+ msg = "Not an HDF file"
+ raise SyntaxError(msg)
+
+ self.fp.seek(offset)
+
+ # make something up
+ self._mode = "F"
+ self._size = 1, 1
+
+ loader = self._load()
+ if loader:
+ loader.open(self)
+
+ def _load(self):
+ return _handler
+
+
+def _save(im, fp, filename):
+ if _handler is None or not hasattr(_handler, "save"):
+ msg = "HDF5 save handler not installed"
+ raise OSError(msg)
+ _handler.save(im, fp, filename)
+
+
+# --------------------------------------------------------------------
+# Registry
+
+Image.register_open(HDF5StubImageFile.format, HDF5StubImageFile, _accept)
+Image.register_save(HDF5StubImageFile.format, _save)
+
+Image.register_extensions(HDF5StubImageFile.format, [".h5", ".hdf"])
diff --git a/Lib/site-packages/PIL/IcnsImagePlugin.py b/Lib/site-packages/PIL/IcnsImagePlugin.py
new file mode 100644
index 0000000..d877b4e
--- /dev/null
+++ b/Lib/site-packages/PIL/IcnsImagePlugin.py
@@ -0,0 +1,400 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# macOS icns file decoder, based on icns.py by Bob Ippolito.
+#
+# history:
+# 2004-10-09 fl Turned into a PIL plugin; removed 2.3 dependencies.
+# 2020-04-04 Allow saving on all operating systems.
+#
+# Copyright (c) 2004 by Bob Ippolito.
+# Copyright (c) 2004 by Secret Labs.
+# Copyright (c) 2004 by Fredrik Lundh.
+# Copyright (c) 2014 by Alastair Houghton.
+# Copyright (c) 2020 by Pan Jing.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import io
+import os
+import struct
+import sys
+
+from . import Image, ImageFile, PngImagePlugin, features
+
+enable_jpeg2k = features.check_codec("jpg_2000")
+if enable_jpeg2k:
+ from . import Jpeg2KImagePlugin
+
+MAGIC = b"icns"
+HEADERSIZE = 8
+
+
+def nextheader(fobj):
+ return struct.unpack(">4sI", fobj.read(HEADERSIZE))
+
+
+def read_32t(fobj, start_length, size):
+ # The 128x128 icon seems to have an extra header for some reason.
+ (start, length) = start_length
+ fobj.seek(start)
+ sig = fobj.read(4)
+ if sig != b"\x00\x00\x00\x00":
+ msg = "Unknown signature, expecting 0x00000000"
+ raise SyntaxError(msg)
+ return read_32(fobj, (start + 4, length - 4), size)
+
+
+def read_32(fobj, start_length, size):
+ """
+ Read a 32bit RGB icon resource. Seems to be either uncompressed or
+ an RLE packbits-like scheme.
+ """
+ (start, length) = start_length
+ fobj.seek(start)
+ pixel_size = (size[0] * size[2], size[1] * size[2])
+ sizesq = pixel_size[0] * pixel_size[1]
+ if length == sizesq * 3:
+ # uncompressed ("RGBRGBGB")
+ indata = fobj.read(length)
+ im = Image.frombuffer("RGB", pixel_size, indata, "raw", "RGB", 0, 1)
+ else:
+ # decode image
+ im = Image.new("RGB", pixel_size, None)
+ for band_ix in range(3):
+ data = []
+ bytesleft = sizesq
+ while bytesleft > 0:
+ byte = fobj.read(1)
+ if not byte:
+ break
+ byte = byte[0]
+ if byte & 0x80:
+ blocksize = byte - 125
+ byte = fobj.read(1)
+ for i in range(blocksize):
+ data.append(byte)
+ else:
+ blocksize = byte + 1
+ data.append(fobj.read(blocksize))
+ bytesleft -= blocksize
+ if bytesleft <= 0:
+ break
+ if bytesleft != 0:
+ msg = f"Error reading channel [{repr(bytesleft)} left]"
+ raise SyntaxError(msg)
+ band = Image.frombuffer("L", pixel_size, b"".join(data), "raw", "L", 0, 1)
+ im.im.putband(band.im, band_ix)
+ return {"RGB": im}
+
+
+def read_mk(fobj, start_length, size):
+ # Alpha masks seem to be uncompressed
+ start = start_length[0]
+ fobj.seek(start)
+ pixel_size = (size[0] * size[2], size[1] * size[2])
+ sizesq = pixel_size[0] * pixel_size[1]
+ band = Image.frombuffer("L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1)
+ return {"A": band}
+
+
+def read_png_or_jpeg2000(fobj, start_length, size):
+ (start, length) = start_length
+ fobj.seek(start)
+ sig = fobj.read(12)
+ if sig[:8] == b"\x89PNG\x0d\x0a\x1a\x0a":
+ fobj.seek(start)
+ im = PngImagePlugin.PngImageFile(fobj)
+ Image._decompression_bomb_check(im.size)
+ return {"RGBA": im}
+ elif (
+ sig[:4] == b"\xff\x4f\xff\x51"
+ or sig[:4] == b"\x0d\x0a\x87\x0a"
+ or sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a"
+ ):
+ if not enable_jpeg2k:
+ msg = (
+ "Unsupported icon subimage format (rebuild PIL "
+ "with JPEG 2000 support to fix this)"
+ )
+ raise ValueError(msg)
+ # j2k, jpc or j2c
+ fobj.seek(start)
+ jp2kstream = fobj.read(length)
+ f = io.BytesIO(jp2kstream)
+ im = Jpeg2KImagePlugin.Jpeg2KImageFile(f)
+ Image._decompression_bomb_check(im.size)
+ if im.mode != "RGBA":
+ im = im.convert("RGBA")
+ return {"RGBA": im}
+ else:
+ msg = "Unsupported icon subimage format"
+ raise ValueError(msg)
+
+
+class IcnsFile:
+ SIZES = {
+ (512, 512, 2): [(b"ic10", read_png_or_jpeg2000)],
+ (512, 512, 1): [(b"ic09", read_png_or_jpeg2000)],
+ (256, 256, 2): [(b"ic14", read_png_or_jpeg2000)],
+ (256, 256, 1): [(b"ic08", read_png_or_jpeg2000)],
+ (128, 128, 2): [(b"ic13", read_png_or_jpeg2000)],
+ (128, 128, 1): [
+ (b"ic07", read_png_or_jpeg2000),
+ (b"it32", read_32t),
+ (b"t8mk", read_mk),
+ ],
+ (64, 64, 1): [(b"icp6", read_png_or_jpeg2000)],
+ (32, 32, 2): [(b"ic12", read_png_or_jpeg2000)],
+ (48, 48, 1): [(b"ih32", read_32), (b"h8mk", read_mk)],
+ (32, 32, 1): [
+ (b"icp5", read_png_or_jpeg2000),
+ (b"il32", read_32),
+ (b"l8mk", read_mk),
+ ],
+ (16, 16, 2): [(b"ic11", read_png_or_jpeg2000)],
+ (16, 16, 1): [
+ (b"icp4", read_png_or_jpeg2000),
+ (b"is32", read_32),
+ (b"s8mk", read_mk),
+ ],
+ }
+
+ def __init__(self, fobj):
+ """
+ fobj is a file-like object as an icns resource
+ """
+ # signature : (start, length)
+ self.dct = dct = {}
+ self.fobj = fobj
+ sig, filesize = nextheader(fobj)
+ if not _accept(sig):
+ msg = "not an icns file"
+ raise SyntaxError(msg)
+ i = HEADERSIZE
+ while i < filesize:
+ sig, blocksize = nextheader(fobj)
+ if blocksize <= 0:
+ msg = "invalid block header"
+ raise SyntaxError(msg)
+ i += HEADERSIZE
+ blocksize -= HEADERSIZE
+ dct[sig] = (i, blocksize)
+ fobj.seek(blocksize, io.SEEK_CUR)
+ i += blocksize
+
+ def itersizes(self):
+ sizes = []
+ for size, fmts in self.SIZES.items():
+ for fmt, reader in fmts:
+ if fmt in self.dct:
+ sizes.append(size)
+ break
+ return sizes
+
+ def bestsize(self):
+ sizes = self.itersizes()
+ if not sizes:
+ msg = "No 32bit icon resources found"
+ raise SyntaxError(msg)
+ return max(sizes)
+
+ def dataforsize(self, size):
+ """
+ Get an icon resource as {channel: array}. Note that
+ the arrays are bottom-up like windows bitmaps and will likely
+ need to be flipped or transposed in some way.
+ """
+ dct = {}
+ for code, reader in self.SIZES[size]:
+ desc = self.dct.get(code)
+ if desc is not None:
+ dct.update(reader(self.fobj, desc, size))
+ return dct
+
+ def getimage(self, size=None):
+ if size is None:
+ size = self.bestsize()
+ if len(size) == 2:
+ size = (size[0], size[1], 1)
+ channels = self.dataforsize(size)
+
+ im = channels.get("RGBA", None)
+ if im:
+ return im
+
+ im = channels.get("RGB").copy()
+ try:
+ im.putalpha(channels["A"])
+ except KeyError:
+ pass
+ return im
+
+
+##
+# Image plugin for Mac OS icons.
+
+
+class IcnsImageFile(ImageFile.ImageFile):
+ """
+ PIL image support for Mac OS .icns files.
+ Chooses the best resolution, but will possibly load
+ a different size image if you mutate the size attribute
+ before calling 'load'.
+
+ The info dictionary has a key 'sizes' that is a list
+ of sizes that the icns file has.
+ """
+
+ format = "ICNS"
+ format_description = "Mac OS icns resource"
+
+ def _open(self):
+ self.icns = IcnsFile(self.fp)
+ self._mode = "RGBA"
+ self.info["sizes"] = self.icns.itersizes()
+ self.best_size = self.icns.bestsize()
+ self.size = (
+ self.best_size[0] * self.best_size[2],
+ self.best_size[1] * self.best_size[2],
+ )
+
+ @property
+ def size(self):
+ return self._size
+
+ @size.setter
+ def size(self, value):
+ info_size = value
+ if info_size not in self.info["sizes"] and len(info_size) == 2:
+ info_size = (info_size[0], info_size[1], 1)
+ if (
+ info_size not in self.info["sizes"]
+ and len(info_size) == 3
+ and info_size[2] == 1
+ ):
+ simple_sizes = [
+ (size[0] * size[2], size[1] * size[2]) for size in self.info["sizes"]
+ ]
+ if value in simple_sizes:
+ info_size = self.info["sizes"][simple_sizes.index(value)]
+ if info_size not in self.info["sizes"]:
+ msg = "This is not one of the allowed sizes of this image"
+ raise ValueError(msg)
+ self._size = value
+
+ def load(self):
+ if len(self.size) == 3:
+ self.best_size = self.size
+ self.size = (
+ self.best_size[0] * self.best_size[2],
+ self.best_size[1] * self.best_size[2],
+ )
+
+ px = Image.Image.load(self)
+ if self.im is not None and self.im.size == self.size:
+ # Already loaded
+ return px
+ self.load_prepare()
+ # This is likely NOT the best way to do it, but whatever.
+ im = self.icns.getimage(self.best_size)
+
+ # If this is a PNG or JPEG 2000, it won't be loaded yet
+ px = im.load()
+
+ self.im = im.im
+ self._mode = im.mode
+ self.size = im.size
+
+ return px
+
+
+def _save(im, fp, filename):
+ """
+ Saves the image as a series of PNG files,
+ that are then combined into a .icns file.
+ """
+ if hasattr(fp, "flush"):
+ fp.flush()
+
+ sizes = {
+ b"ic07": 128,
+ b"ic08": 256,
+ b"ic09": 512,
+ b"ic10": 1024,
+ b"ic11": 32,
+ b"ic12": 64,
+ b"ic13": 256,
+ b"ic14": 512,
+ }
+ provided_images = {im.width: im for im in im.encoderinfo.get("append_images", [])}
+ size_streams = {}
+ for size in set(sizes.values()):
+ image = (
+ provided_images[size]
+ if size in provided_images
+ else im.resize((size, size))
+ )
+
+ temp = io.BytesIO()
+ image.save(temp, "png")
+ size_streams[size] = temp.getvalue()
+
+ entries = []
+ for type, size in sizes.items():
+ stream = size_streams[size]
+ entries.append(
+ {"type": type, "size": HEADERSIZE + len(stream), "stream": stream}
+ )
+
+ # Header
+ fp.write(MAGIC)
+ file_length = HEADERSIZE # Header
+ file_length += HEADERSIZE + 8 * len(entries) # TOC
+ file_length += sum(entry["size"] for entry in entries)
+ fp.write(struct.pack(">i", file_length))
+
+ # TOC
+ fp.write(b"TOC ")
+ fp.write(struct.pack(">i", HEADERSIZE + len(entries) * HEADERSIZE))
+ for entry in entries:
+ fp.write(entry["type"])
+ fp.write(struct.pack(">i", entry["size"]))
+
+ # Data
+ for entry in entries:
+ fp.write(entry["type"])
+ fp.write(struct.pack(">i", entry["size"]))
+ fp.write(entry["stream"])
+
+ if hasattr(fp, "flush"):
+ fp.flush()
+
+
+def _accept(prefix):
+ return prefix[:4] == MAGIC
+
+
+Image.register_open(IcnsImageFile.format, IcnsImageFile, _accept)
+Image.register_extension(IcnsImageFile.format, ".icns")
+
+Image.register_save(IcnsImageFile.format, _save)
+Image.register_mime(IcnsImageFile.format, "image/icns")
+
+if __name__ == "__main__":
+ if len(sys.argv) < 2:
+ print("Syntax: python3 IcnsImagePlugin.py [file]")
+ sys.exit()
+
+ with open(sys.argv[1], "rb") as fp:
+ imf = IcnsImageFile(fp)
+ for size in imf.info["sizes"]:
+ width, height, scale = imf.size = size
+ imf.save(f"out-{width}-{height}-{scale}.png")
+ with Image.open(sys.argv[1]) as im:
+ im.save("out.png")
+ if sys.platform == "windows":
+ os.startfile("out.png")
diff --git a/Lib/site-packages/PIL/IcoImagePlugin.py b/Lib/site-packages/PIL/IcoImagePlugin.py
new file mode 100644
index 0000000..1b22f86
--- /dev/null
+++ b/Lib/site-packages/PIL/IcoImagePlugin.py
@@ -0,0 +1,356 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# Windows Icon support for PIL
+#
+# History:
+# 96-05-27 fl Created
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+
+# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis
+# .
+# https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki
+#
+# Icon format references:
+# * https://en.wikipedia.org/wiki/ICO_(file_format)
+# * https://msdn.microsoft.com/en-us/library/ms997538.aspx
+from __future__ import annotations
+
+import warnings
+from io import BytesIO
+from math import ceil, log
+
+from . import BmpImagePlugin, Image, ImageFile, PngImagePlugin
+from ._binary import i16le as i16
+from ._binary import i32le as i32
+from ._binary import o8
+from ._binary import o16le as o16
+from ._binary import o32le as o32
+
+#
+# --------------------------------------------------------------------
+
+_MAGIC = b"\0\0\1\0"
+
+
+def _save(im, fp, filename):
+ fp.write(_MAGIC) # (2+2)
+ bmp = im.encoderinfo.get("bitmap_format") == "bmp"
+ sizes = im.encoderinfo.get(
+ "sizes",
+ [(16, 16), (24, 24), (32, 32), (48, 48), (64, 64), (128, 128), (256, 256)],
+ )
+ frames = []
+ provided_ims = [im] + im.encoderinfo.get("append_images", [])
+ width, height = im.size
+ for size in sorted(set(sizes)):
+ if size[0] > width or size[1] > height or size[0] > 256 or size[1] > 256:
+ continue
+
+ for provided_im in provided_ims:
+ if provided_im.size != size:
+ continue
+ frames.append(provided_im)
+ if bmp:
+ bits = BmpImagePlugin.SAVE[provided_im.mode][1]
+ bits_used = [bits]
+ for other_im in provided_ims:
+ if other_im.size != size:
+ continue
+ bits = BmpImagePlugin.SAVE[other_im.mode][1]
+ if bits not in bits_used:
+ # Another image has been supplied for this size
+ # with a different bit depth
+ frames.append(other_im)
+ bits_used.append(bits)
+ break
+ else:
+ # TODO: invent a more convenient method for proportional scalings
+ frame = provided_im.copy()
+ frame.thumbnail(size, Image.Resampling.LANCZOS, reducing_gap=None)
+ frames.append(frame)
+ fp.write(o16(len(frames))) # idCount(2)
+ offset = fp.tell() + len(frames) * 16
+ for frame in frames:
+ width, height = frame.size
+ # 0 means 256
+ fp.write(o8(width if width < 256 else 0)) # bWidth(1)
+ fp.write(o8(height if height < 256 else 0)) # bHeight(1)
+
+ bits, colors = BmpImagePlugin.SAVE[frame.mode][1:] if bmp else (32, 0)
+ fp.write(o8(colors)) # bColorCount(1)
+ fp.write(b"\0") # bReserved(1)
+ fp.write(b"\0\0") # wPlanes(2)
+ fp.write(o16(bits)) # wBitCount(2)
+
+ image_io = BytesIO()
+ if bmp:
+ frame.save(image_io, "dib")
+
+ if bits != 32:
+ and_mask = Image.new("1", size)
+ ImageFile._save(
+ and_mask, image_io, [("raw", (0, 0) + size, 0, ("1", 0, -1))]
+ )
+ else:
+ frame.save(image_io, "png")
+ image_io.seek(0)
+ image_bytes = image_io.read()
+ if bmp:
+ image_bytes = image_bytes[:8] + o32(height * 2) + image_bytes[12:]
+ bytes_len = len(image_bytes)
+ fp.write(o32(bytes_len)) # dwBytesInRes(4)
+ fp.write(o32(offset)) # dwImageOffset(4)
+ current = fp.tell()
+ fp.seek(offset)
+ fp.write(image_bytes)
+ offset = offset + bytes_len
+ fp.seek(current)
+
+
+def _accept(prefix):
+ return prefix[:4] == _MAGIC
+
+
+class IcoFile:
+ def __init__(self, buf):
+ """
+ Parse image from file-like object containing ico file data
+ """
+
+ # check magic
+ s = buf.read(6)
+ if not _accept(s):
+ msg = "not an ICO file"
+ raise SyntaxError(msg)
+
+ self.buf = buf
+ self.entry = []
+
+ # Number of items in file
+ self.nb_items = i16(s, 4)
+
+ # Get headers for each item
+ for i in range(self.nb_items):
+ s = buf.read(16)
+
+ icon_header = {
+ "width": s[0],
+ "height": s[1],
+ "nb_color": s[2], # No. of colors in image (0 if >=8bpp)
+ "reserved": s[3],
+ "planes": i16(s, 4),
+ "bpp": i16(s, 6),
+ "size": i32(s, 8),
+ "offset": i32(s, 12),
+ }
+
+ # See Wikipedia
+ for j in ("width", "height"):
+ if not icon_header[j]:
+ icon_header[j] = 256
+
+ # See Wikipedia notes about color depth.
+ # We need this just to differ images with equal sizes
+ icon_header["color_depth"] = (
+ icon_header["bpp"]
+ or (
+ icon_header["nb_color"] != 0
+ and ceil(log(icon_header["nb_color"], 2))
+ )
+ or 256
+ )
+
+ icon_header["dim"] = (icon_header["width"], icon_header["height"])
+ icon_header["square"] = icon_header["width"] * icon_header["height"]
+
+ self.entry.append(icon_header)
+
+ self.entry = sorted(self.entry, key=lambda x: x["color_depth"])
+ # ICO images are usually squares
+ self.entry = sorted(self.entry, key=lambda x: x["square"], reverse=True)
+
+ def sizes(self):
+ """
+ Get a list of all available icon sizes and color depths.
+ """
+ return {(h["width"], h["height"]) for h in self.entry}
+
+ def getentryindex(self, size, bpp=False):
+ for i, h in enumerate(self.entry):
+ if size == h["dim"] and (bpp is False or bpp == h["color_depth"]):
+ return i
+ return 0
+
+ def getimage(self, size, bpp=False):
+ """
+ Get an image from the icon
+ """
+ return self.frame(self.getentryindex(size, bpp))
+
+ def frame(self, idx):
+ """
+ Get an image from frame idx
+ """
+
+ header = self.entry[idx]
+
+ self.buf.seek(header["offset"])
+ data = self.buf.read(8)
+ self.buf.seek(header["offset"])
+
+ if data[:8] == PngImagePlugin._MAGIC:
+ # png frame
+ im = PngImagePlugin.PngImageFile(self.buf)
+ Image._decompression_bomb_check(im.size)
+ else:
+ # XOR + AND mask bmp frame
+ im = BmpImagePlugin.DibImageFile(self.buf)
+ Image._decompression_bomb_check(im.size)
+
+ # change tile dimension to only encompass XOR image
+ im._size = (im.size[0], int(im.size[1] / 2))
+ d, e, o, a = im.tile[0]
+ im.tile[0] = d, (0, 0) + im.size, o, a
+
+ # figure out where AND mask image starts
+ bpp = header["bpp"]
+ if 32 == bpp:
+ # 32-bit color depth icon image allows semitransparent areas
+ # PIL's DIB format ignores transparency bits, recover them.
+ # The DIB is packed in BGRX byte order where X is the alpha
+ # channel.
+
+ # Back up to start of bmp data
+ self.buf.seek(o)
+ # extract every 4th byte (eg. 3,7,11,15,...)
+ alpha_bytes = self.buf.read(im.size[0] * im.size[1] * 4)[3::4]
+
+ # convert to an 8bpp grayscale image
+ mask = Image.frombuffer(
+ "L", # 8bpp
+ im.size, # (w, h)
+ alpha_bytes, # source chars
+ "raw", # raw decoder
+ ("L", 0, -1), # 8bpp inverted, unpadded, reversed
+ )
+ else:
+ # get AND image from end of bitmap
+ w = im.size[0]
+ if (w % 32) > 0:
+ # bitmap row data is aligned to word boundaries
+ w += 32 - (im.size[0] % 32)
+
+ # the total mask data is
+ # padded row size * height / bits per char
+
+ total_bytes = int((w * im.size[1]) / 8)
+ and_mask_offset = header["offset"] + header["size"] - total_bytes
+
+ self.buf.seek(and_mask_offset)
+ mask_data = self.buf.read(total_bytes)
+
+ # convert raw data to image
+ mask = Image.frombuffer(
+ "1", # 1 bpp
+ im.size, # (w, h)
+ mask_data, # source chars
+ "raw", # raw decoder
+ ("1;I", int(w / 8), -1), # 1bpp inverted, padded, reversed
+ )
+
+ # now we have two images, im is XOR image and mask is AND image
+
+ # apply mask image as alpha channel
+ im = im.convert("RGBA")
+ im.putalpha(mask)
+
+ return im
+
+
+##
+# Image plugin for Windows Icon files.
+
+
+class IcoImageFile(ImageFile.ImageFile):
+ """
+ PIL read-only image support for Microsoft Windows .ico files.
+
+ By default the largest resolution image in the file will be loaded. This
+ can be changed by altering the 'size' attribute before calling 'load'.
+
+ The info dictionary has a key 'sizes' that is a list of the sizes available
+ in the icon file.
+
+ Handles classic, XP and Vista icon formats.
+
+ When saving, PNG compression is used. Support for this was only added in
+ Windows Vista. If you are unable to view the icon in Windows, convert the
+ image to "RGBA" mode before saving.
+
+ This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis
+ .
+ https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki
+ """
+
+ format = "ICO"
+ format_description = "Windows Icon"
+
+ def _open(self):
+ self.ico = IcoFile(self.fp)
+ self.info["sizes"] = self.ico.sizes()
+ self.size = self.ico.entry[0]["dim"]
+ self.load()
+
+ @property
+ def size(self):
+ return self._size
+
+ @size.setter
+ def size(self, value):
+ if value not in self.info["sizes"]:
+ msg = "This is not one of the allowed sizes of this image"
+ raise ValueError(msg)
+ self._size = value
+
+ def load(self):
+ if self.im is not None and self.im.size == self.size:
+ # Already loaded
+ return Image.Image.load(self)
+ im = self.ico.getimage(self.size)
+ # if tile is PNG, it won't really be loaded yet
+ im.load()
+ self.im = im.im
+ self.pyaccess = None
+ self._mode = im.mode
+ if im.size != self.size:
+ warnings.warn("Image was not the expected size")
+
+ index = self.ico.getentryindex(self.size)
+ sizes = list(self.info["sizes"])
+ sizes[index] = im.size
+ self.info["sizes"] = set(sizes)
+
+ self.size = im.size
+
+ def load_seek(self):
+ # Flag the ImageFile.Parser so that it
+ # just does all the decode at the end.
+ pass
+
+
+#
+# --------------------------------------------------------------------
+
+
+Image.register_open(IcoImageFile.format, IcoImageFile, _accept)
+Image.register_save(IcoImageFile.format, _save)
+Image.register_extension(IcoImageFile.format, ".ico")
+
+Image.register_mime(IcoImageFile.format, "image/x-icon")
diff --git a/Lib/site-packages/PIL/ImImagePlugin.py b/Lib/site-packages/PIL/ImImagePlugin.py
new file mode 100644
index 0000000..97d726a
--- /dev/null
+++ b/Lib/site-packages/PIL/ImImagePlugin.py
@@ -0,0 +1,371 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# IFUNC IM file handling for PIL
+#
+# history:
+# 1995-09-01 fl Created.
+# 1997-01-03 fl Save palette images
+# 1997-01-08 fl Added sequence support
+# 1997-01-23 fl Added P and RGB save support
+# 1997-05-31 fl Read floating point images
+# 1997-06-22 fl Save floating point images
+# 1997-08-27 fl Read and save 1-bit images
+# 1998-06-25 fl Added support for RGB+LUT images
+# 1998-07-02 fl Added support for YCC images
+# 1998-07-15 fl Renamed offset attribute to avoid name clash
+# 1998-12-29 fl Added I;16 support
+# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7)
+# 2003-09-26 fl Added LA/PA support
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1995-2001 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import os
+import re
+
+from . import Image, ImageFile, ImagePalette
+
+# --------------------------------------------------------------------
+# Standard tags
+
+COMMENT = "Comment"
+DATE = "Date"
+EQUIPMENT = "Digitalization equipment"
+FRAMES = "File size (no of images)"
+LUT = "Lut"
+NAME = "Name"
+SCALE = "Scale (x,y)"
+SIZE = "Image size (x*y)"
+MODE = "Image type"
+
+TAGS = {
+ COMMENT: 0,
+ DATE: 0,
+ EQUIPMENT: 0,
+ FRAMES: 0,
+ LUT: 0,
+ NAME: 0,
+ SCALE: 0,
+ SIZE: 0,
+ MODE: 0,
+}
+
+OPEN = {
+ # ifunc93/p3cfunc formats
+ "0 1 image": ("1", "1"),
+ "L 1 image": ("1", "1"),
+ "Greyscale image": ("L", "L"),
+ "Grayscale image": ("L", "L"),
+ "RGB image": ("RGB", "RGB;L"),
+ "RLB image": ("RGB", "RLB"),
+ "RYB image": ("RGB", "RLB"),
+ "B1 image": ("1", "1"),
+ "B2 image": ("P", "P;2"),
+ "B4 image": ("P", "P;4"),
+ "X 24 image": ("RGB", "RGB"),
+ "L 32 S image": ("I", "I;32"),
+ "L 32 F image": ("F", "F;32"),
+ # old p3cfunc formats
+ "RGB3 image": ("RGB", "RGB;T"),
+ "RYB3 image": ("RGB", "RYB;T"),
+ # extensions
+ "LA image": ("LA", "LA;L"),
+ "PA image": ("LA", "PA;L"),
+ "RGBA image": ("RGBA", "RGBA;L"),
+ "RGBX image": ("RGBX", "RGBX;L"),
+ "CMYK image": ("CMYK", "CMYK;L"),
+ "YCC image": ("YCbCr", "YCbCr;L"),
+}
+
+# ifunc95 extensions
+for i in ["8", "8S", "16", "16S", "32", "32F"]:
+ OPEN[f"L {i} image"] = ("F", f"F;{i}")
+ OPEN[f"L*{i} image"] = ("F", f"F;{i}")
+for i in ["16", "16L", "16B"]:
+ OPEN[f"L {i} image"] = (f"I;{i}", f"I;{i}")
+ OPEN[f"L*{i} image"] = (f"I;{i}", f"I;{i}")
+for i in ["32S"]:
+ OPEN[f"L {i} image"] = ("I", f"I;{i}")
+ OPEN[f"L*{i} image"] = ("I", f"I;{i}")
+for i in range(2, 33):
+ OPEN[f"L*{i} image"] = ("F", f"F;{i}")
+
+
+# --------------------------------------------------------------------
+# Read IM directory
+
+split = re.compile(rb"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$")
+
+
+def number(s):
+ try:
+ return int(s)
+ except ValueError:
+ return float(s)
+
+
+##
+# Image plugin for the IFUNC IM file format.
+
+
+class ImImageFile(ImageFile.ImageFile):
+ format = "IM"
+ format_description = "IFUNC Image Memory"
+ _close_exclusive_fp_after_loading = False
+
+ def _open(self):
+ # Quick rejection: if there's not an LF among the first
+ # 100 bytes, this is (probably) not a text header.
+
+ if b"\n" not in self.fp.read(100):
+ msg = "not an IM file"
+ raise SyntaxError(msg)
+ self.fp.seek(0)
+
+ n = 0
+
+ # Default values
+ self.info[MODE] = "L"
+ self.info[SIZE] = (512, 512)
+ self.info[FRAMES] = 1
+
+ self.rawmode = "L"
+
+ while True:
+ s = self.fp.read(1)
+
+ # Some versions of IFUNC uses \n\r instead of \r\n...
+ if s == b"\r":
+ continue
+
+ if not s or s == b"\0" or s == b"\x1A":
+ break
+
+ # FIXME: this may read whole file if not a text file
+ s = s + self.fp.readline()
+
+ if len(s) > 100:
+ msg = "not an IM file"
+ raise SyntaxError(msg)
+
+ if s[-2:] == b"\r\n":
+ s = s[:-2]
+ elif s[-1:] == b"\n":
+ s = s[:-1]
+
+ try:
+ m = split.match(s)
+ except re.error as e:
+ msg = "not an IM file"
+ raise SyntaxError(msg) from e
+
+ if m:
+ k, v = m.group(1, 2)
+
+ # Don't know if this is the correct encoding,
+ # but a decent guess (I guess)
+ k = k.decode("latin-1", "replace")
+ v = v.decode("latin-1", "replace")
+
+ # Convert value as appropriate
+ if k in [FRAMES, SCALE, SIZE]:
+ v = v.replace("*", ",")
+ v = tuple(map(number, v.split(",")))
+ if len(v) == 1:
+ v = v[0]
+ elif k == MODE and v in OPEN:
+ v, self.rawmode = OPEN[v]
+
+ # Add to dictionary. Note that COMMENT tags are
+ # combined into a list of strings.
+ if k == COMMENT:
+ if k in self.info:
+ self.info[k].append(v)
+ else:
+ self.info[k] = [v]
+ else:
+ self.info[k] = v
+
+ if k in TAGS:
+ n += 1
+
+ else:
+ msg = "Syntax error in IM header: " + s.decode("ascii", "replace")
+ raise SyntaxError(msg)
+
+ if not n:
+ msg = "Not an IM file"
+ raise SyntaxError(msg)
+
+ # Basic attributes
+ self._size = self.info[SIZE]
+ self._mode = self.info[MODE]
+
+ # Skip forward to start of image data
+ while s and s[:1] != b"\x1A":
+ s = self.fp.read(1)
+ if not s:
+ msg = "File truncated"
+ raise SyntaxError(msg)
+
+ if LUT in self.info:
+ # convert lookup table to palette or lut attribute
+ palette = self.fp.read(768)
+ greyscale = 1 # greyscale palette
+ linear = 1 # linear greyscale palette
+ for i in range(256):
+ if palette[i] == palette[i + 256] == palette[i + 512]:
+ if palette[i] != i:
+ linear = 0
+ else:
+ greyscale = 0
+ if self.mode in ["L", "LA", "P", "PA"]:
+ if greyscale:
+ if not linear:
+ self.lut = list(palette[:256])
+ else:
+ if self.mode in ["L", "P"]:
+ self._mode = self.rawmode = "P"
+ elif self.mode in ["LA", "PA"]:
+ self._mode = "PA"
+ self.rawmode = "PA;L"
+ self.palette = ImagePalette.raw("RGB;L", palette)
+ elif self.mode == "RGB":
+ if not greyscale or not linear:
+ self.lut = list(palette)
+
+ self.frame = 0
+
+ self.__offset = offs = self.fp.tell()
+
+ self._fp = self.fp # FIXME: hack
+
+ if self.rawmode[:2] == "F;":
+ # ifunc95 formats
+ try:
+ # use bit decoder (if necessary)
+ bits = int(self.rawmode[2:])
+ if bits not in [8, 16, 32]:
+ self.tile = [("bit", (0, 0) + self.size, offs, (bits, 8, 3, 0, -1))]
+ return
+ except ValueError:
+ pass
+
+ if self.rawmode in ["RGB;T", "RYB;T"]:
+ # Old LabEye/3PC files. Would be very surprised if anyone
+ # ever stumbled upon such a file ;-)
+ size = self.size[0] * self.size[1]
+ self.tile = [
+ ("raw", (0, 0) + self.size, offs, ("G", 0, -1)),
+ ("raw", (0, 0) + self.size, offs + size, ("R", 0, -1)),
+ ("raw", (0, 0) + self.size, offs + 2 * size, ("B", 0, -1)),
+ ]
+ else:
+ # LabEye/IFUNC files
+ self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))]
+
+ @property
+ def n_frames(self):
+ return self.info[FRAMES]
+
+ @property
+ def is_animated(self):
+ return self.info[FRAMES] > 1
+
+ def seek(self, frame):
+ if not self._seek_check(frame):
+ return
+
+ self.frame = frame
+
+ if self.mode == "1":
+ bits = 1
+ else:
+ bits = 8 * len(self.mode)
+
+ size = ((self.size[0] * bits + 7) // 8) * self.size[1]
+ offs = self.__offset + frame * size
+
+ self.fp = self._fp
+
+ self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))]
+
+ def tell(self):
+ return self.frame
+
+
+#
+# --------------------------------------------------------------------
+# Save IM files
+
+
+SAVE = {
+ # mode: (im type, raw mode)
+ "1": ("0 1", "1"),
+ "L": ("Greyscale", "L"),
+ "LA": ("LA", "LA;L"),
+ "P": ("Greyscale", "P"),
+ "PA": ("LA", "PA;L"),
+ "I": ("L 32S", "I;32S"),
+ "I;16": ("L 16", "I;16"),
+ "I;16L": ("L 16L", "I;16L"),
+ "I;16B": ("L 16B", "I;16B"),
+ "F": ("L 32F", "F;32F"),
+ "RGB": ("RGB", "RGB;L"),
+ "RGBA": ("RGBA", "RGBA;L"),
+ "RGBX": ("RGBX", "RGBX;L"),
+ "CMYK": ("CMYK", "CMYK;L"),
+ "YCbCr": ("YCC", "YCbCr;L"),
+}
+
+
+def _save(im, fp, filename):
+ try:
+ image_type, rawmode = SAVE[im.mode]
+ except KeyError as e:
+ msg = f"Cannot save {im.mode} images as IM"
+ raise ValueError(msg) from e
+
+ frames = im.encoderinfo.get("frames", 1)
+
+ fp.write(f"Image type: {image_type} image\r\n".encode("ascii"))
+ if filename:
+ # Each line must be 100 characters or less,
+ # or: SyntaxError("not an IM file")
+ # 8 characters are used for "Name: " and "\r\n"
+ # Keep just the filename, ditch the potentially overlong path
+ name, ext = os.path.splitext(os.path.basename(filename))
+ name = "".join([name[: 92 - len(ext)], ext])
+
+ fp.write(f"Name: {name}\r\n".encode("ascii"))
+ fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode("ascii"))
+ fp.write(f"File size (no of images): {frames}\r\n".encode("ascii"))
+ if im.mode in ["P", "PA"]:
+ fp.write(b"Lut: 1\r\n")
+ fp.write(b"\000" * (511 - fp.tell()) + b"\032")
+ if im.mode in ["P", "PA"]:
+ im_palette = im.im.getpalette("RGB", "RGB;L")
+ colors = len(im_palette) // 3
+ palette = b""
+ for i in range(3):
+ palette += im_palette[colors * i : colors * (i + 1)]
+ palette += b"\x00" * (256 - colors)
+ fp.write(palette) # 768 bytes
+ ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, -1))])
+
+
+#
+# --------------------------------------------------------------------
+# Registry
+
+
+Image.register_open(ImImageFile.format, ImImageFile)
+Image.register_save(ImImageFile.format, _save)
+
+Image.register_extension(ImImageFile.format, ".im")
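+
+
+# Illustrative usage sketch (not part of the plugin itself): once the
+# format is registered above, IM files round-trip through the ordinary
+# Image API. "example.im" is a placeholder path.
+#
+#     from PIL import Image
+#
+#     im = Image.new("RGB", (16, 16), "red")
+#     im.save("example.im")  # dispatched to _save() via the registry
+#     with Image.open("example.im") as reloaded:
+#         assert reloaded.mode == "RGB"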
diff --git a/Lib/site-packages/PIL/Image.py b/Lib/site-packages/PIL/Image.py
new file mode 100644
index 0000000..1bba9aa
--- /dev/null
+++ b/Lib/site-packages/PIL/Image.py
@@ -0,0 +1,3944 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# the Image class wrapper
+#
+# partial release history:
+# 1995-09-09 fl Created
+# 1996-03-11 fl PIL release 0.0 (proof of concept)
+# 1996-04-30 fl PIL release 0.1b1
+# 1999-07-28 fl PIL release 1.0 final
+# 2000-06-07 fl PIL release 1.1
+# 2000-10-20 fl PIL release 1.1.1
+# 2001-05-07 fl PIL release 1.1.2
+# 2002-03-15 fl PIL release 1.1.3
+# 2003-05-10 fl PIL release 1.1.4
+# 2005-03-28 fl PIL release 1.1.5
+# 2006-12-02 fl PIL release 1.1.6
+# 2009-11-15 fl PIL release 1.1.7
+#
+# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved.
+# Copyright (c) 1995-2009 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+from __future__ import annotations
+
+import atexit
+import builtins
+import io
+import logging
+import math
+import os
+import re
+import struct
+import sys
+import tempfile
+import warnings
+from collections.abc import Callable, MutableMapping
+from enum import IntEnum
+from pathlib import Path
+
+try:
+ from defusedxml import ElementTree
+except ImportError:
+ ElementTree = None
+
+# VERSION was removed in Pillow 6.0.0.
+# PILLOW_VERSION was removed in Pillow 9.0.0.
+# Use __version__ instead.
+from . import (
+ ExifTags,
+ ImageMode,
+ TiffTags,
+ UnidentifiedImageError,
+ __version__,
+ _plugins,
+)
+from ._binary import i32le, o32be, o32le
+from ._util import DeferredError, is_path
+
+logger = logging.getLogger(__name__)
+
+
+class DecompressionBombWarning(RuntimeWarning):
+ pass
+
+
+class DecompressionBombError(Exception):
+ pass
+
+
+# Limit to around a quarter gigabyte for a 24-bit (3 bpp) image
+MAX_IMAGE_PIXELS = int(1024 * 1024 * 1024 // 4 // 3)
+
+
+try:
+ # If the _imaging C module is not present, Pillow will not load.
+ # Note that other modules should not refer to _imaging directly;
+ # import Image and use the Image.core variable instead.
+ # Also note that Image.core is not a publicly documented interface,
+ # and should be considered private and subject to change.
+ from . import _imaging as core
+
+ if __version__ != getattr(core, "PILLOW_VERSION", None):
+ msg = (
+ "The _imaging extension was built for another version of Pillow or PIL:\n"
+ f"Core version: {getattr(core, 'PILLOW_VERSION', None)}\n"
+ f"Pillow version: {__version__}"
+ )
+ raise ImportError(msg)
+
+except ImportError as v:
+ core = DeferredError.new(ImportError("The _imaging C module is not installed."))
+ # Explanations for ways that we know we might have an import error
+ if str(v).startswith("Module use of python"):
+ # The _imaging C module is present, but not compiled for
+ # the right version (windows only). Print a warning, if
+ # possible.
+ warnings.warn(
+ "The _imaging extension was built for another version of Python.",
+ RuntimeWarning,
+ )
+ elif str(v).startswith("The _imaging extension"):
+ warnings.warn(str(v), RuntimeWarning)
+ # Fail here anyway. Don't let people run with a mostly broken Pillow.
+ # see docs/porting.rst
+ raise
+
+
+USE_CFFI_ACCESS = False
+try:
+ import cffi
+except ImportError:
+ cffi = None
+
+
+def isImageType(t):
+ """
+ Checks if an object is an image object.
+
+ .. warning::
+
+ This function is for internal use only.
+
+ :param t: object to check if it's an image
+ :returns: True if the object is an image
+ """
+ return hasattr(t, "im")
+
+
+#
+# Constants
+
+
+# transpose
+class Transpose(IntEnum):
+ FLIP_LEFT_RIGHT = 0
+ FLIP_TOP_BOTTOM = 1
+ ROTATE_90 = 2
+ ROTATE_180 = 3
+ ROTATE_270 = 4
+ TRANSPOSE = 5
+ TRANSVERSE = 6
+
+
+# transforms (also defined in Imaging.h)
+class Transform(IntEnum):
+ AFFINE = 0
+ EXTENT = 1
+ PERSPECTIVE = 2
+ QUAD = 3
+ MESH = 4
+
+
+# resampling filters (also defined in Imaging.h)
+class Resampling(IntEnum):
+ NEAREST = 0
+ BOX = 4
+ BILINEAR = 2
+ HAMMING = 5
+ BICUBIC = 3
+ LANCZOS = 1
+
+
+_filters_support = {
+ Resampling.BOX: 0.5,
+ Resampling.BILINEAR: 1.0,
+ Resampling.HAMMING: 1.0,
+ Resampling.BICUBIC: 2.0,
+ Resampling.LANCZOS: 3.0,
+}
+
+
+# dithers
+class Dither(IntEnum):
+ NONE = 0
+ ORDERED = 1 # Not yet implemented
+ RASTERIZE = 2 # Not yet implemented
+ FLOYDSTEINBERG = 3 # default
+
+
+# palettes/quantizers
+class Palette(IntEnum):
+ WEB = 0
+ ADAPTIVE = 1
+
+
+class Quantize(IntEnum):
+ MEDIANCUT = 0
+ MAXCOVERAGE = 1
+ FASTOCTREE = 2
+ LIBIMAGEQUANT = 3
+
+
+module = sys.modules[__name__]
+for enum in (Transpose, Transform, Resampling, Dither, Palette, Quantize):
+ for item in enum:
+ setattr(module, item.name, item.value)
+
+
+if hasattr(core, "DEFAULT_STRATEGY"):
+ DEFAULT_STRATEGY = core.DEFAULT_STRATEGY
+ FILTERED = core.FILTERED
+ HUFFMAN_ONLY = core.HUFFMAN_ONLY
+ RLE = core.RLE
+ FIXED = core.FIXED
+
+
+# --------------------------------------------------------------------
+# Registries
+
+ID = []
+OPEN = {}
+MIME = {}
+SAVE = {}
+SAVE_ALL = {}
+EXTENSION = {}
+DECODERS = {}
+ENCODERS = {}
+
+# --------------------------------------------------------------------
+# Modes
+
+_ENDIAN = "<" if sys.byteorder == "little" else ">"
+
+
+def _conv_type_shape(im):
+ m = ImageMode.getmode(im.mode)
+ shape = (im.height, im.width)
+ extra = len(m.bands)
+ if extra != 1:
+ shape += (extra,)
+ return shape, m.typestr
+
+
+MODES = ["1", "CMYK", "F", "HSV", "I", "L", "LAB", "P", "RGB", "RGBA", "RGBX", "YCbCr"]
+
+# raw modes that may be memory mapped. NOTE: if you change this, you
+# may have to modify the stride calculation in map.c too!
+_MAPMODES = ("L", "P", "RGBX", "RGBA", "CMYK", "I;16", "I;16L", "I;16B")
+
+
+def getmodebase(mode):
+ """
+    Gets the "base" mode for a given mode. This function returns "L" for
+ images that contain grayscale data, and "RGB" for images that
+ contain color data.
+
+ :param mode: Input mode.
+ :returns: "L" or "RGB".
+ :exception KeyError: If the input mode was not a standard mode.
+ """
+ return ImageMode.getmode(mode).basemode
+
+
+def getmodetype(mode):
+ """
+ Gets the storage type mode. Given a mode, this function returns a
+ single-layer mode suitable for storing individual bands.
+
+ :param mode: Input mode.
+ :returns: "L", "I", or "F".
+ :exception KeyError: If the input mode was not a standard mode.
+ """
+ return ImageMode.getmode(mode).basetype
+
+
+def getmodebandnames(mode):
+ """
+    Gets a tuple of individual band names. Given a mode, this function returns
+    a tuple containing the names of individual bands (use
+    :py:func:`~PIL.Image.getmodetype` to get the mode used to store each
+    individual band).
+
+ :param mode: Input mode.
+ :returns: A tuple containing band names. The length of the tuple
+ gives the number of bands in an image of the given mode.
+ :exception KeyError: If the input mode was not a standard mode.
+ """
+ return ImageMode.getmode(mode).bands
+
+
+def getmodebands(mode):
+ """
+ Gets the number of individual bands for this mode.
+
+ :param mode: Input mode.
+ :returns: The number of bands in this mode.
+ :exception KeyError: If the input mode was not a standard mode.
+ """
+ return len(ImageMode.getmode(mode).bands)
+
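+# Illustrative sketch of the mode helpers above, using values from
+# ImageMode's standard mode table:
+#
+#     getmodebase("YCbCr")      -> "RGB"
+#     getmodetype("YCbCr")      -> "L"
+#     getmodebandnames("YCbCr") -> ("Y", "Cb", "Cr")
+#     getmodebands("YCbCr")     -> 3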
+
+# --------------------------------------------------------------------
+# Helpers
+
+_initialized = 0
+
+
+def preinit():
+ """
+    Explicitly loads BMP, GIF, JPEG, PPM and PNG file format drivers.
+
+ It is called when opening or saving images.
+ """
+
+ global _initialized
+ if _initialized >= 1:
+ return
+
+ try:
+ from . import BmpImagePlugin
+
+ assert BmpImagePlugin
+ except ImportError:
+ pass
+ try:
+ from . import GifImagePlugin
+
+ assert GifImagePlugin
+ except ImportError:
+ pass
+ try:
+ from . import JpegImagePlugin
+
+ assert JpegImagePlugin
+ except ImportError:
+ pass
+ try:
+ from . import PpmImagePlugin
+
+ assert PpmImagePlugin
+ except ImportError:
+ pass
+ try:
+ from . import PngImagePlugin
+
+ assert PngImagePlugin
+ except ImportError:
+ pass
+
+ _initialized = 1
+
+
+def init():
+ """
+ Explicitly initializes the Python Imaging Library. This function
+ loads all available file format drivers.
+
+    It is called when opening or saving images if :py:func:`~preinit` is
+    insufficient, and by :py:func:`~PIL.features.pilinfo`.
+ """
+
+ global _initialized
+ if _initialized >= 2:
+ return 0
+
+ parent_name = __name__.rpartition(".")[0]
+ for plugin in _plugins:
+ try:
+ logger.debug("Importing %s", plugin)
+ __import__(f"{parent_name}.{plugin}", globals(), locals(), [])
+ except ImportError as e:
+ logger.debug("Image: failed to import %s: %s", plugin, e)
+
+ if OPEN or SAVE:
+ _initialized = 2
+ return 1
+
+
+# --------------------------------------------------------------------
+# Codec factories (used by tobytes/frombytes and ImageFile.load)
+
+
+def _getdecoder(mode, decoder_name, args, extra=()):
+ # tweak arguments
+ if args is None:
+ args = ()
+ elif not isinstance(args, tuple):
+ args = (args,)
+
+ try:
+ decoder = DECODERS[decoder_name]
+ except KeyError:
+ pass
+ else:
+ return decoder(mode, *args + extra)
+
+ try:
+ # get decoder
+ decoder = getattr(core, decoder_name + "_decoder")
+ except AttributeError as e:
+ msg = f"decoder {decoder_name} not available"
+ raise OSError(msg) from e
+ return decoder(mode, *args + extra)
+
+
+def _getencoder(mode, encoder_name, args, extra=()):
+ # tweak arguments
+ if args is None:
+ args = ()
+ elif not isinstance(args, tuple):
+ args = (args,)
+
+ try:
+ encoder = ENCODERS[encoder_name]
+ except KeyError:
+ pass
+ else:
+ return encoder(mode, *args + extra)
+
+ try:
+ # get encoder
+ encoder = getattr(core, encoder_name + "_encoder")
+ except AttributeError as e:
+ msg = f"encoder {encoder_name} not available"
+ raise OSError(msg) from e
+ return encoder(mode, *args + extra)
+
+
+# --------------------------------------------------------------------
+# Simple expression analyzer
+
+
+class _E:
+ def __init__(self, scale, offset):
+ self.scale = scale
+ self.offset = offset
+
+ def __neg__(self):
+ return _E(-self.scale, -self.offset)
+
+ def __add__(self, other):
+ if isinstance(other, _E):
+ return _E(self.scale + other.scale, self.offset + other.offset)
+ return _E(self.scale, self.offset + other)
+
+ __radd__ = __add__
+
+ def __sub__(self, other):
+ return self + -other
+
+ def __rsub__(self, other):
+ return other + -self
+
+ def __mul__(self, other):
+ if isinstance(other, _E):
+ return NotImplemented
+ return _E(self.scale * other, self.offset * other)
+
+ __rmul__ = __mul__
+
+ def __truediv__(self, other):
+ if isinstance(other, _E):
+ return NotImplemented
+ return _E(self.scale / other, self.offset / other)
+
+
+def _getscaleoffset(expr):
+ a = expr(_E(1, 0))
+ return (a.scale, a.offset) if isinstance(a, _E) else (0, a)
+
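+# Illustrative sketch: the analyzer recovers (scale, offset) from a linear
+# expression, e.g.
+#
+#     _getscaleoffset(lambda x: x * 2 + 10)  # -> (2, 10)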
+
+# --------------------------------------------------------------------
+# Implementation wrapper
+
+
+class Image:
+ """
+ This class represents an image object. To create
+ :py:class:`~PIL.Image.Image` objects, use the appropriate factory
+ functions. There's hardly ever any reason to call the Image constructor
+ directly.
+
+ * :py:func:`~PIL.Image.open`
+ * :py:func:`~PIL.Image.new`
+ * :py:func:`~PIL.Image.frombytes`
+ """
+
+ format: str | None = None
+ format_description: str | None = None
+ _close_exclusive_fp_after_loading = True
+
+ def __init__(self):
+ # FIXME: take "new" parameters / other image?
+ # FIXME: turn mode and size into delegating properties?
+ self.im = None
+ self._mode = ""
+ self._size = (0, 0)
+ self.palette = None
+ self.info = {}
+ self.readonly = 0
+ self.pyaccess = None
+ self._exif = None
+
+ @property
+ def width(self):
+ return self.size[0]
+
+ @property
+ def height(self):
+ return self.size[1]
+
+ @property
+ def size(self):
+ return self._size
+
+ @property
+ def mode(self):
+ return self._mode
+
+ def _new(self, im):
+ new = Image()
+ new.im = im
+ new._mode = im.mode
+ new._size = im.size
+ if im.mode in ("P", "PA"):
+ if self.palette:
+ new.palette = self.palette.copy()
+ else:
+ from . import ImagePalette
+
+ new.palette = ImagePalette.ImagePalette()
+ new.info = self.info.copy()
+ return new
+
+ # Context manager support
+ def __enter__(self):
+ return self
+
+ def _close_fp(self):
+ if getattr(self, "_fp", False):
+ if self._fp != self.fp:
+ self._fp.close()
+ self._fp = DeferredError(ValueError("Operation on closed image"))
+ if self.fp:
+ self.fp.close()
+
+ def __exit__(self, *args):
+ if hasattr(self, "fp"):
+ if getattr(self, "_exclusive_fp", False):
+ self._close_fp()
+ self.fp = None
+
+ def close(self):
+ """
+ Closes the file pointer, if possible.
+
+ This operation will destroy the image core and release its memory.
+ The image data will be unusable afterward.
+
+ This function is required to close images that have multiple frames or
+ have not had their file read and closed by the
+ :py:meth:`~PIL.Image.Image.load` method. See :ref:`file-handling` for
+ more information.
+ """
+ if hasattr(self, "fp"):
+ try:
+ self._close_fp()
+ self.fp = None
+ except Exception as msg:
+ logger.debug("Error closing: %s", msg)
+
+ if getattr(self, "map", None):
+ self.map = None
+
+ # Instead of simply setting to None, we're setting up a
+ # deferred error that will better explain that the core image
+ # object is gone.
+ self.im = DeferredError(ValueError("Operation on closed image"))
+
+ def _copy(self):
+ self.load()
+ self.im = self.im.copy()
+ self.pyaccess = None
+ self.readonly = 0
+
+ def _ensure_mutable(self):
+ if self.readonly:
+ self._copy()
+ else:
+ self.load()
+
+ def _dump(self, file=None, format=None, **options):
+ suffix = ""
+ if format:
+ suffix = "." + format
+
+ if not file:
+ f, filename = tempfile.mkstemp(suffix)
+ os.close(f)
+ else:
+ filename = file
+ if not filename.endswith(suffix):
+ filename = filename + suffix
+
+ self.load()
+
+ if not format or format == "PPM":
+ self.im.save_ppm(filename)
+ else:
+ self.save(filename, format, **options)
+
+ return filename
+
+ def __eq__(self, other):
+ return (
+ self.__class__ is other.__class__
+ and self.mode == other.mode
+ and self.size == other.size
+ and self.info == other.info
+ and self.getpalette() == other.getpalette()
+ and self.tobytes() == other.tobytes()
+ )
+
+ def __repr__(self):
+ return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % (
+ self.__class__.__module__,
+ self.__class__.__name__,
+ self.mode,
+ self.size[0],
+ self.size[1],
+ id(self),
+ )
+
+ def _repr_pretty_(self, p, cycle):
+ """IPython plain text display support"""
+
+ # Same as __repr__ but without unpredictable id(self),
+ # to keep Jupyter notebook `text/plain` output stable.
+ p.text(
+ "<%s.%s image mode=%s size=%dx%d>"
+ % (
+ self.__class__.__module__,
+ self.__class__.__name__,
+ self.mode,
+ self.size[0],
+ self.size[1],
+ )
+ )
+
+ def _repr_image(self, image_format, **kwargs):
+ """Helper function for iPython display hook.
+
+ :param image_format: Image format.
+ :returns: image as bytes, saved into the given format.
+ """
+ b = io.BytesIO()
+ try:
+ self.save(b, image_format, **kwargs)
+ except Exception:
+ return None
+ return b.getvalue()
+
+ def _repr_png_(self):
+ """iPython display hook support for PNG format.
+
+ :returns: PNG version of the image as bytes
+ """
+ return self._repr_image("PNG", compress_level=1)
+
+ def _repr_jpeg_(self):
+ """iPython display hook support for JPEG format.
+
+ :returns: JPEG version of the image as bytes
+ """
+ return self._repr_image("JPEG")
+
+ @property
+ def __array_interface__(self):
+ # numpy array interface support
+ new = {"version": 3}
+ try:
+ if self.mode == "1":
+ # Binary images need to be extended from bits to bytes
+ # See: https://github.com/python-pillow/Pillow/issues/350
+ new["data"] = self.tobytes("raw", "L")
+ else:
+ new["data"] = self.tobytes()
+ except Exception as e:
+ if not isinstance(e, (MemoryError, RecursionError)):
+ try:
+ import numpy
+ from packaging.version import parse as parse_version
+ except ImportError:
+ pass
+ else:
+ if parse_version(numpy.__version__) < parse_version("1.23"):
+ warnings.warn(e)
+ raise
+ new["shape"], new["typestr"] = _conv_type_shape(self)
+ return new
+
+ def __getstate__(self):
+ im_data = self.tobytes() # load image first
+ return [self.info, self.mode, self.size, self.getpalette(), im_data]
+
+ def __setstate__(self, state):
+ Image.__init__(self)
+ info, mode, size, palette, data = state
+ self.info = info
+ self._mode = mode
+ self._size = size
+ self.im = core.new(mode, size)
+ if mode in ("L", "LA", "P", "PA") and palette:
+ self.putpalette(palette)
+ self.frombytes(data)
+
+ def tobytes(self, encoder_name="raw", *args):
+ """
+ Return image as a bytes object.
+
+ .. warning::
+
+ This method returns the raw image data from the internal
+ storage. For compressed image data (e.g. PNG, JPEG) use
+ :meth:`~.save`, with a BytesIO parameter for in-memory
+ data.
+
+ :param encoder_name: What encoder to use. The default is to
+ use the standard "raw" encoder.
+
+ A list of C encoders can be seen under
+ codecs section of the function array in
+ :file:`_imaging.c`. Python encoders are
+ registered within the relevant plugins.
+ :param args: Extra arguments to the encoder.
+ :returns: A :py:class:`bytes` object.
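+
+        Example (an illustrative sketch)::
+
+            im = Image.new("RGB", (2, 2))
+            data = im.tobytes()  # 12 bytes of raw "RGB" data (2 * 2 * 3)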
+ """
+
+ # may pass tuple instead of argument list
+ if len(args) == 1 and isinstance(args[0], tuple):
+ args = args[0]
+
+ if encoder_name == "raw" and args == ():
+ args = self.mode
+
+ self.load()
+
+ if self.width == 0 or self.height == 0:
+ return b""
+
+ # unpack data
+ e = _getencoder(self.mode, encoder_name, args)
+ e.setimage(self.im)
+
+ bufsize = max(65536, self.size[0] * 4) # see RawEncode.c
+
+ output = []
+ while True:
+ bytes_consumed, errcode, data = e.encode(bufsize)
+ output.append(data)
+ if errcode:
+ break
+ if errcode < 0:
+ msg = f"encoder error {errcode} in tobytes"
+ raise RuntimeError(msg)
+
+ return b"".join(output)
+
+ def tobitmap(self, name="image"):
+ """
+ Returns the image converted to an X11 bitmap.
+
+ .. note:: This method only works for mode "1" images.
+
+ :param name: The name prefix to use for the bitmap variables.
+ :returns: A string containing an X11 bitmap.
+ :raises ValueError: If the mode is not "1"
+ """
+
+ self.load()
+ if self.mode != "1":
+ msg = "not a bitmap"
+ raise ValueError(msg)
+ data = self.tobytes("xbm")
+ return b"".join(
+ [
+ f"#define {name}_width {self.size[0]}\n".encode("ascii"),
+ f"#define {name}_height {self.size[1]}\n".encode("ascii"),
+ f"static char {name}_bits[] = {{\n".encode("ascii"),
+ data,
+ b"};",
+ ]
+ )
+
+ def frombytes(self, data, decoder_name="raw", *args):
+ """
+ Loads this image with pixel data from a bytes object.
+
+ This method is similar to the :py:func:`~PIL.Image.frombytes` function,
+ but loads data into this image instead of creating a new image object.
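+
+        Example (an illustrative sketch)::
+
+            im = Image.new("L", (2, 2))
+            im.frombytes(bytes(4))  # four 8-bit pixels, all black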
+ """
+
+ if self.width == 0 or self.height == 0:
+ return
+
+ # may pass tuple instead of argument list
+ if len(args) == 1 and isinstance(args[0], tuple):
+ args = args[0]
+
+ # default format
+ if decoder_name == "raw" and args == ():
+ args = self.mode
+
+ # unpack data
+ d = _getdecoder(self.mode, decoder_name, args)
+ d.setimage(self.im)
+ s = d.decode(data)
+
+ if s[0] >= 0:
+ msg = "not enough image data"
+ raise ValueError(msg)
+ if s[1] != 0:
+ msg = "cannot decode image data"
+ raise ValueError(msg)
+
+ def load(self):
+ """
+ Allocates storage for the image and loads the pixel data. In
+ normal cases, you don't need to call this method, since the
+ Image class automatically loads an opened image when it is
+ accessed for the first time.
+
+ If the file associated with the image was opened by Pillow, then this
+ method will close it. The exception to this is if the image has
+ multiple frames, in which case the file will be left open for seek
+ operations. See :ref:`file-handling` for more information.
+
+ :returns: An image access object.
+ :rtype: :ref:`PixelAccess` or :py:class:`PIL.PyAccess`
+ """
+ if self.im is not None and self.palette and self.palette.dirty:
+ # realize palette
+ mode, arr = self.palette.getdata()
+ self.im.putpalette(mode, arr)
+ self.palette.dirty = 0
+ self.palette.rawmode = None
+ if "transparency" in self.info and mode in ("LA", "PA"):
+ if isinstance(self.info["transparency"], int):
+ self.im.putpalettealpha(self.info["transparency"], 0)
+ else:
+ self.im.putpalettealphas(self.info["transparency"])
+ self.palette.mode = "RGBA"
+ else:
+ palette_mode = "RGBA" if mode.startswith("RGBA") else "RGB"
+ self.palette.mode = palette_mode
+ self.palette.palette = self.im.getpalette(palette_mode, palette_mode)
+
+ if self.im is not None:
+ if cffi and USE_CFFI_ACCESS:
+ if self.pyaccess:
+ return self.pyaccess
+ from . import PyAccess
+
+ self.pyaccess = PyAccess.new(self, self.readonly)
+ if self.pyaccess:
+ return self.pyaccess
+ return self.im.pixel_access(self.readonly)
+
+ def verify(self):
+ """
+ Verifies the contents of a file. For data read from a file, this
+ method attempts to determine if the file is broken, without
+ actually decoding the image data. If this method finds any
+ problems, it raises suitable exceptions. If you need to load
+ the image after using this method, you must reopen the image
+ file.
+ """
+ pass
+
+ def convert(
+ self, mode=None, matrix=None, dither=None, palette=Palette.WEB, colors=256
+ ):
+ """
+ Returns a converted copy of this image. For the "P" mode, this
+ method translates pixels through the palette. If mode is
+ omitted, a mode is chosen so that all information in the image
+ and the palette can be represented without a palette.
+
+ The current version supports all possible conversions between
+ "L", "RGB" and "CMYK". The ``matrix`` argument only supports "L"
+ and "RGB".
+
+ When translating a color image to grayscale (mode "L"),
+ the library uses the ITU-R 601-2 luma transform::
+
+ L = R * 299/1000 + G * 587/1000 + B * 114/1000
+
+ The default method of converting a grayscale ("L") or "RGB"
+ image into a bilevel (mode "1") image uses Floyd-Steinberg
+ dither to approximate the original image luminosity levels. If
+ dither is ``None``, all values larger than 127 are set to 255 (white),
+ all other values to 0 (black). To use other thresholds, use the
+ :py:meth:`~PIL.Image.Image.point` method.
+
+ When converting from "RGBA" to "P" without a ``matrix`` argument,
+ this passes the operation to :py:meth:`~PIL.Image.Image.quantize`,
+ and ``dither`` and ``palette`` are ignored.
+
+ When converting from "PA", if an "RGBA" palette is present, the alpha
+ channel from the image will be used instead of the values from the palette.
+
+ :param mode: The requested mode. See: :ref:`concept-modes`.
+ :param matrix: An optional conversion matrix. If given, this
+ should be 4- or 12-tuple containing floating point values.
+ :param dither: Dithering method, used when converting from
+ mode "RGB" to "P" or from "RGB" or "L" to "1".
+ Available methods are :data:`Dither.NONE` or :data:`Dither.FLOYDSTEINBERG`
+ (default). Note that this is not used when ``matrix`` is supplied.
+ :param palette: Palette to use when converting from mode "RGB"
+ to "P". Available palettes are :data:`Palette.WEB` or
+ :data:`Palette.ADAPTIVE`.
+ :param colors: Number of colors to use for the :data:`Palette.ADAPTIVE`
+ palette. Defaults to 256.
+ :rtype: :py:class:`~PIL.Image.Image`
+ :returns: An :py:class:`~PIL.Image.Image` object.
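+
+        Example (an illustrative sketch; ``"hopper.jpg"`` is a placeholder
+        filename)::
+
+            with Image.open("hopper.jpg") as im:
+                gray = im.convert("L")  # ITU-R 601-2 luma transform
+                bw = gray.convert("1")  # Floyd-Steinberg dither by default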
+ """
+
+ self.load()
+
+ has_transparency = "transparency" in self.info
+ if not mode and self.mode == "P":
+ # determine default mode
+ if self.palette:
+ mode = self.palette.mode
+ else:
+ mode = "RGB"
+ if mode == "RGB" and has_transparency:
+ mode = "RGBA"
+ if not mode or (mode == self.mode and not matrix):
+ return self.copy()
+
+ if matrix:
+ # matrix conversion
+ if mode not in ("L", "RGB"):
+ msg = "illegal conversion"
+ raise ValueError(msg)
+ im = self.im.convert_matrix(mode, matrix)
+ new_im = self._new(im)
+ if has_transparency and self.im.bands == 3:
+ transparency = new_im.info["transparency"]
+
+ def convert_transparency(m, v):
+ v = m[0] * v[0] + m[1] * v[1] + m[2] * v[2] + m[3] * 0.5
+ return max(0, min(255, int(v)))
+
+ if mode == "L":
+ transparency = convert_transparency(matrix, transparency)
+ elif len(mode) == 3:
+ transparency = tuple(
+ convert_transparency(matrix[i * 4 : i * 4 + 4], transparency)
+ for i in range(0, len(transparency))
+ )
+ new_im.info["transparency"] = transparency
+ return new_im
+
+ if mode == "P" and self.mode == "RGBA":
+ return self.quantize(colors)
+
+ trns = None
+ delete_trns = False
+ # transparency handling
+ if has_transparency:
+ if (self.mode in ("1", "L", "I") and mode in ("LA", "RGBA")) or (
+ self.mode == "RGB" and mode == "RGBA"
+ ):
+ # Use transparent conversion to promote from transparent
+ # color to an alpha channel.
+ new_im = self._new(
+ self.im.convert_transparent(mode, self.info["transparency"])
+ )
+ del new_im.info["transparency"]
+ return new_im
+ elif self.mode in ("L", "RGB", "P") and mode in ("L", "RGB", "P"):
+ t = self.info["transparency"]
+ if isinstance(t, bytes):
+ # Dragons. This can't be represented by a single color
+ warnings.warn(
+ "Palette images with Transparency expressed in bytes should be "
+ "converted to RGBA images"
+ )
+ delete_trns = True
+ else:
+ # get the new transparency color.
+ # use existing conversions
+ trns_im = new(self.mode, (1, 1))
+ if self.mode == "P":
+ trns_im.putpalette(self.palette)
+ if isinstance(t, tuple):
+ err = "Couldn't allocate a palette color for transparency"
+ try:
+ t = trns_im.palette.getcolor(t, self)
+ except ValueError as e:
+ if str(e) == "cannot allocate more than 256 colors":
+ # If all 256 colors are in use,
+ # then there is no need for transparency
+ t = None
+ else:
+ raise ValueError(err) from e
+ if t is None:
+ trns = None
+ else:
+ trns_im.putpixel((0, 0), t)
+
+ if mode in ("L", "RGB"):
+ trns_im = trns_im.convert(mode)
+ else:
+ # can't just retrieve the palette number, got to do it
+ # after quantization.
+ trns_im = trns_im.convert("RGB")
+ trns = trns_im.getpixel((0, 0))
+
+ elif self.mode == "P" and mode in ("LA", "PA", "RGBA"):
+ t = self.info["transparency"]
+ delete_trns = True
+
+ if isinstance(t, bytes):
+ self.im.putpalettealphas(t)
+ elif isinstance(t, int):
+ self.im.putpalettealpha(t, 0)
+ else:
+ msg = "Transparency for P mode should be bytes or int"
+ raise ValueError(msg)
+
+ if mode == "P" and palette == Palette.ADAPTIVE:
+ im = self.im.quantize(colors)
+ new_im = self._new(im)
+ from . import ImagePalette
+
+ new_im.palette = ImagePalette.ImagePalette(
+ "RGB", new_im.im.getpalette("RGB")
+ )
+ if delete_trns:
+ # This could possibly happen if we requantize to fewer colors.
+ # The transparency would be totally off in that case.
+ del new_im.info["transparency"]
+ if trns is not None:
+ try:
+ new_im.info["transparency"] = new_im.palette.getcolor(trns, new_im)
+ except Exception:
+ # if we can't make a transparent color, don't leave the old
+ # transparency hanging around to mess us up.
+ del new_im.info["transparency"]
+ warnings.warn("Couldn't allocate palette entry for transparency")
+ return new_im
+
+ if "LAB" in (self.mode, mode):
+ other_mode = mode if self.mode == "LAB" else self.mode
+ if other_mode in ("RGB", "RGBA", "RGBX"):
+ from . import ImageCms
+
+ srgb = ImageCms.createProfile("sRGB")
+ lab = ImageCms.createProfile("LAB")
+ profiles = [lab, srgb] if self.mode == "LAB" else [srgb, lab]
+ transform = ImageCms.buildTransform(
+ profiles[0], profiles[1], self.mode, mode
+ )
+ return transform.apply(self)
+
+ # colorspace conversion
+ if dither is None:
+ dither = Dither.FLOYDSTEINBERG
+
+ try:
+ im = self.im.convert(mode, dither)
+ except ValueError:
+ try:
+ # normalize source image and try again
+ modebase = getmodebase(self.mode)
+ if modebase == self.mode:
+ raise
+ im = self.im.convert(modebase)
+ im = im.convert(mode, dither)
+ except KeyError as e:
+ msg = "illegal conversion"
+ raise ValueError(msg) from e
+
+ new_im = self._new(im)
+ if mode == "P" and palette != Palette.ADAPTIVE:
+ from . import ImagePalette
+
+ new_im.palette = ImagePalette.ImagePalette("RGB", im.getpalette("RGB"))
+ if delete_trns:
+ # crash fail if we leave a bytes transparency in an rgb/l mode.
+ del new_im.info["transparency"]
+ if trns is not None:
+ if new_im.mode == "P":
+ try:
+ new_im.info["transparency"] = new_im.palette.getcolor(trns, new_im)
+ except ValueError as e:
+ del new_im.info["transparency"]
+ if str(e) != "cannot allocate more than 256 colors":
+ # If all 256 colors are in use,
+ # then there is no need for transparency
+ warnings.warn(
+ "Couldn't allocate palette entry for transparency"
+ )
+ else:
+ new_im.info["transparency"] = trns
+ return new_im
+
+ def quantize(
+ self,
+ colors=256,
+ method=None,
+ kmeans=0,
+ palette=None,
+ dither=Dither.FLOYDSTEINBERG,
+ ):
+ """
+ Convert the image to 'P' mode with the specified number
+ of colors.
+
+ :param colors: The desired number of colors, <= 256
+ :param method: :data:`Quantize.MEDIANCUT` (median cut),
+ :data:`Quantize.MAXCOVERAGE` (maximum coverage),
+ :data:`Quantize.FASTOCTREE` (fast octree),
+ :data:`Quantize.LIBIMAGEQUANT` (libimagequant; check support
+ using :py:func:`PIL.features.check_feature` with
+ ``feature="libimagequant"``).
+
+ By default, :data:`Quantize.MEDIANCUT` will be used.
+
+ The exception to this is RGBA images. :data:`Quantize.MEDIANCUT`
+ and :data:`Quantize.MAXCOVERAGE` do not support RGBA images, so
+ :data:`Quantize.FASTOCTREE` is used by default instead.
+ :param kmeans: Integer
+ :param palette: Quantize to the palette of given
+ :py:class:`PIL.Image.Image`.
+ :param dither: Dithering method, used when converting from
+ mode "RGB" to "P" or from "RGB" or "L" to "1".
+ Available methods are :data:`Dither.NONE` or :data:`Dither.FLOYDSTEINBERG`
+ (default).
+ :returns: A new image
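+
+        Example (an illustrative sketch)::
+
+            rgb = Image.new("RGB", (64, 64), "red")
+            pal = rgb.quantize(colors=16)  # "P" image, median cut by default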
+ """
+
+ self.load()
+
+ if method is None:
+ # defaults:
+ method = Quantize.MEDIANCUT
+ if self.mode == "RGBA":
+ method = Quantize.FASTOCTREE
+
+ if self.mode == "RGBA" and method not in (
+ Quantize.FASTOCTREE,
+ Quantize.LIBIMAGEQUANT,
+ ):
+ # Caller specified an invalid mode.
+ msg = (
+ "Fast Octree (method == 2) and libimagequant (method == 3) "
+ "are the only valid methods for quantizing RGBA images"
+ )
+ raise ValueError(msg)
+
+ if palette:
+ # use palette from reference image
+ palette.load()
+ if palette.mode != "P":
+ msg = "bad mode for palette image"
+ raise ValueError(msg)
+ if self.mode not in {"RGB", "L"}:
+ msg = "only RGB or L mode images can be quantized to a palette"
+ raise ValueError(msg)
+ im = self.im.convert("P", dither, palette.im)
+ new_im = self._new(im)
+ new_im.palette = palette.palette.copy()
+ return new_im
+
+ im = self._new(self.im.quantize(colors, method, kmeans))
+
+ from . import ImagePalette
+
+ mode = im.im.getpalettemode()
+ palette = im.im.getpalette(mode, mode)[: colors * len(mode)]
+ im.palette = ImagePalette.ImagePalette(mode, palette)
+
+ return im
+
+ def copy(self) -> Image:
+ """
+ Copies this image. Use this method if you wish to paste things
+ into an image, but still retain the original.
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ :returns: An :py:class:`~PIL.Image.Image` object.
+ """
+ self.load()
+ return self._new(self.im.copy())
+
+ __copy__ = copy
+
+ def crop(self, box=None) -> Image:
+ """
+ Returns a rectangular region from this image. The box is a
+ 4-tuple defining the left, upper, right, and lower pixel
+ coordinate. See :ref:`coordinate-system`.
+
+ Note: Prior to Pillow 3.4.0, this was a lazy operation.
+
+ :param box: The crop rectangle, as a (left, upper, right, lower)-tuple.
+ :rtype: :py:class:`~PIL.Image.Image`
+ :returns: An :py:class:`~PIL.Image.Image` object.
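+
+        Example (an illustrative sketch)::
+
+            im = Image.new("RGB", (100, 100))
+            tile = im.crop((10, 10, 60, 60))  # 50x50 region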
+ """
+
+ if box is None:
+ return self.copy()
+
+ if box[2] < box[0]:
+ msg = "Coordinate 'right' is less than 'left'"
+ raise ValueError(msg)
+ elif box[3] < box[1]:
+ msg = "Coordinate 'lower' is less than 'upper'"
+ raise ValueError(msg)
+
+ self.load()
+ return self._new(self._crop(self.im, box))
+
+ def _crop(self, im, box):
+ """
+ Returns a rectangular region from the core image object im.
+
+ This is equivalent to calling im.crop((x0, y0, x1, y1)), but
+ includes additional sanity checks.
+
+ :param im: a core image object
+ :param box: The crop rectangle, as a (left, upper, right, lower)-tuple.
+ :returns: A core image object.
+ """
+
+ x0, y0, x1, y1 = map(int, map(round, box))
+
+ absolute_values = (abs(x1 - x0), abs(y1 - y0))
+
+ _decompression_bomb_check(absolute_values)
+
+ return im.crop((x0, y0, x1, y1))
+
+ def draft(self, mode, size):
+ """
+ Configures the image file loader so it returns a version of the
+ image that as closely as possible matches the given mode and
+ size. For example, you can use this method to convert a color
+ JPEG to grayscale while loading it.
+
+ If any changes are made, returns a tuple with the chosen ``mode`` and
+ ``box`` with coordinates of the original image within the altered one.
+
+ Note that this method modifies the :py:class:`~PIL.Image.Image` object
+ in place. If the image has already been loaded, this method has no
+ effect.
+
+ Note: This method is not implemented for most images. It is
+ currently implemented only for JPEG and MPO images.
+
+ :param mode: The requested mode.
+ :param size: The requested size in pixels, as a 2-tuple:
+ (width, height).
+ """
+ pass
+
+ def _expand(self, xmargin, ymargin=None):
+ if ymargin is None:
+ ymargin = xmargin
+ self.load()
+ return self._new(self.im.expand(xmargin, ymargin))
+
+ def filter(self, filter):
+ """
+ Filters this image using the given filter. For a list of
+ available filters, see the :py:mod:`~PIL.ImageFilter` module.
+
+ :param filter: Filter kernel.
+ :returns: An :py:class:`~PIL.Image.Image` object."""
+
+ from . import ImageFilter
+
+ self.load()
+
+ if isinstance(filter, Callable):
+ filter = filter()
+ if not hasattr(filter, "filter"):
+ msg = "filter argument should be ImageFilter.Filter instance or class"
+ raise TypeError(msg)
+
+ multiband = isinstance(filter, ImageFilter.MultibandFilter)
+ if self.im.bands == 1 or multiband:
+ return self._new(filter.filter(self.im))
+
+ ims = [
+ self._new(filter.filter(self.im.getband(c))) for c in range(self.im.bands)
+ ]
+ return merge(self.mode, ims)
+
+ def getbands(self):
+ """
+ Returns a tuple containing the name of each band in this image.
+ For example, ``getbands`` on an RGB image returns ("R", "G", "B").
+
+ :returns: A tuple containing band names.
+ :rtype: tuple
+ """
+ return ImageMode.getmode(self.mode).bands
+
+ def getbbox(self, *, alpha_only=True):
+ """
+ Calculates the bounding box of the non-zero regions in the
+ image.
+
+ :param alpha_only: Optional flag, defaulting to ``True``.
+ If ``True`` and the image has an alpha channel, trim transparent pixels.
+ Otherwise, trim pixels when all channels are zero.
+ Keyword-only argument.
+ :returns: The bounding box is returned as a 4-tuple defining the
+ left, upper, right, and lower pixel coordinate. See
+ :ref:`coordinate-system`. If the image is completely empty, this
+ method returns None.
+
+ """
+
+ self.load()
+ return self.im.getbbox(alpha_only)
+
+ def getcolors(self, maxcolors=256):
+ """
+ Returns a list of colors used in this image.
+
+ The colors will be in the image's mode. For example, an RGB image will
+ return a tuple of (red, green, blue) color values, and a P image will
+ return the index of the color in the palette.
+
+ :param maxcolors: Maximum number of colors. If this number is
+ exceeded, this method returns None. The default limit is
+ 256 colors.
+ :returns: An unsorted list of (count, pixel) values.
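+
+        Example (an illustrative sketch)::
+
+            im = Image.new("RGB", (4, 4), "red")
+            im.getcolors()  # [(16, (255, 0, 0))]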
+ """
+
+ self.load()
+ if self.mode in ("1", "L", "P"):
+ h = self.im.histogram()
+ out = [(h[i], i) for i in range(256) if h[i]]
+ if len(out) > maxcolors:
+ return None
+ return out
+ return self.im.getcolors(maxcolors)
+
+ def getdata(self, band=None):
+ """
+ Returns the contents of this image as a sequence object
+ containing pixel values. The sequence object is flattened, so
+ that values for line one follow directly after the values of
+ line zero, and so on.
+
+ Note that the sequence object returned by this method is an
+ internal PIL data type, which only supports certain sequence
+ operations. To convert it to an ordinary sequence (e.g. for
+ printing), use ``list(im.getdata())``.
+
+ :param band: What band to return. The default is to return
+ all bands. To return a single band, pass in the index
+ value (e.g. 0 to get the "R" band from an "RGB" image).
+ :returns: A sequence-like object.
+ """
+
+ self.load()
+ if band is not None:
+ return self.im.getband(band)
+ return self.im # could be abused
+
+ def getextrema(self):
+ """
+ Gets the minimum and maximum pixel values for each band in
+ the image.
+
+ :returns: For a single-band image, a 2-tuple containing the
+ minimum and maximum pixel value. For a multi-band image,
+ a tuple containing one 2-tuple for each band.
+ """
+
+ self.load()
+ if self.im.bands > 1:
+ return tuple(self.im.getband(i).getextrema() for i in range(self.im.bands))
+ return self.im.getextrema()
+
+ def _getxmp(self, xmp_tags):
+ def get_name(tag):
+ return re.sub("^{[^}]+}", "", tag)
+
+ def get_value(element):
+ value = {get_name(k): v for k, v in element.attrib.items()}
+ children = list(element)
+ if children:
+ for child in children:
+ name = get_name(child.tag)
+ child_value = get_value(child)
+ if name in value:
+ if not isinstance(value[name], list):
+ value[name] = [value[name]]
+ value[name].append(child_value)
+ else:
+ value[name] = child_value
+ elif value:
+ if element.text:
+ value["text"] = element.text
+ else:
+ return element.text
+ return value
+
+ if ElementTree is None:
+ warnings.warn("XMP data cannot be read without defusedxml dependency")
+ return {}
+ else:
+ root = ElementTree.fromstring(xmp_tags)
+ return {get_name(root.tag): get_value(root)}
+
+ def getexif(self):
+ """
+ Gets EXIF data from the image.
+
+ :returns: an :py:class:`~PIL.Image.Exif` object.
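+
+        Example (an illustrative sketch; ``"photo.jpg"`` is a placeholder
+        filename)::
+
+            with Image.open("photo.jpg") as im:
+                exif = im.getexif()
+                orientation = exif.get(ExifTags.Base.Orientation)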
+ """
+ if self._exif is None:
+ self._exif = Exif()
+ self._exif._loaded = False
+ elif self._exif._loaded:
+ return self._exif
+ self._exif._loaded = True
+
+ exif_info = self.info.get("exif")
+ if exif_info is None:
+ if "Raw profile type exif" in self.info:
+ exif_info = bytes.fromhex(
+ "".join(self.info["Raw profile type exif"].split("\n")[3:])
+ )
+ elif hasattr(self, "tag_v2"):
+ self._exif.bigtiff = self.tag_v2._bigtiff
+ self._exif.endian = self.tag_v2._endian
+ self._exif.load_from_fp(self.fp, self.tag_v2._offset)
+ if exif_info is not None:
+ self._exif.load(exif_info)
+
+ # XMP tags
+ if ExifTags.Base.Orientation not in self._exif:
+ xmp_tags = self.info.get("XML:com.adobe.xmp")
+ if xmp_tags:
+ match = re.search(r'tiff:Orientation(="|>)([0-9])', xmp_tags)
+ if match:
+ self._exif[ExifTags.Base.Orientation] = int(match[2])
+
+ return self._exif
+
+ def _reload_exif(self):
+ if self._exif is None or not self._exif._loaded:
+ return
+ self._exif._loaded = False
+ self.getexif()
+
+ def get_child_images(self):
+ child_images = []
+ exif = self.getexif()
+ ifds = []
+ if ExifTags.Base.SubIFDs in exif:
+ subifd_offsets = exif[ExifTags.Base.SubIFDs]
+ if subifd_offsets:
+ if not isinstance(subifd_offsets, tuple):
+ subifd_offsets = (subifd_offsets,)
+ for subifd_offset in subifd_offsets:
+ ifds.append((exif._get_ifd_dict(subifd_offset), subifd_offset))
+ ifd1 = exif.get_ifd(ExifTags.IFD.IFD1)
+ if ifd1 and ifd1.get(513):
+ ifds.append((ifd1, exif._info.next))
+
+ offset = None
+ for ifd, ifd_offset in ifds:
+ current_offset = self.fp.tell()
+ if offset is None:
+ offset = current_offset
+
+ fp = self.fp
+ thumbnail_offset = ifd.get(513)
+ if thumbnail_offset is not None:
+ try:
+ thumbnail_offset += self._exif_offset
+ except AttributeError:
+ pass
+ self.fp.seek(thumbnail_offset)
+ data = self.fp.read(ifd.get(514))
+ fp = io.BytesIO(data)
+
+ with open(fp) as im:
+ if thumbnail_offset is None:
+ im._frame_pos = [ifd_offset]
+ im._seek(0)
+ im.load()
+ child_images.append(im)
+
+ if offset is not None:
+ self.fp.seek(offset)
+ return child_images
+
+ def getim(self):
+ """
+ Returns a capsule that points to the internal image memory.
+
+ :returns: A capsule object.
+ """
+
+ self.load()
+ return self.im.ptr
+
+ def getpalette(self, rawmode="RGB"):
+ """
+ Returns the image palette as a list.
+
+ :param rawmode: The mode in which to return the palette. ``None`` will
+ return the palette in its current mode.
+
+ .. versionadded:: 9.1.0
+
+ :returns: A list of color values [r, g, b, ...], or None if the
+ image has no palette.
+ """
+
+ self.load()
+ try:
+ mode = self.im.getpalettemode()
+ except ValueError:
+ return None # no palette
+ if rawmode is None:
+ rawmode = mode
+ return list(self.im.getpalette(mode, rawmode))
+
+ @property
+ def has_transparency_data(self) -> bool:
+ """
+ Determine if an image has transparency data, whether in the form of an
+ alpha channel, a palette with an alpha channel, or a "transparency" key
+ in the info dictionary.
+
+        Note that the image might still appear solid if all of its alpha
+        values are opaque.
+
+ :returns: A boolean.
+ """
+ return (
+ self.mode in ("LA", "La", "PA", "RGBA", "RGBa")
+ or (self.mode == "P" and self.palette.mode.endswith("A"))
+ or "transparency" in self.info
+ )
+
+ def apply_transparency(self):
+ """
+ If a P mode image has a "transparency" key in the info dictionary,
+ remove the key and instead apply the transparency to the palette.
+ Otherwise, the image is unchanged.
+ """
+ if self.mode != "P" or "transparency" not in self.info:
+ return
+
+ from . import ImagePalette
+
+ palette = self.getpalette("RGBA")
+ transparency = self.info["transparency"]
+ if isinstance(transparency, bytes):
+ for i, alpha in enumerate(transparency):
+ palette[i * 4 + 3] = alpha
+ else:
+ palette[transparency * 4 + 3] = 0
+ self.palette = ImagePalette.ImagePalette("RGBA", bytes(palette))
+ self.palette.dirty = 1
+
+ del self.info["transparency"]
+
+ def getpixel(self, xy):
+ """
+ Returns the pixel value at a given position.
+
+ :param xy: The coordinate, given as (x, y). See
+ :ref:`coordinate-system`.
+ :returns: The pixel value. If the image is a multi-layer image,
+ this method returns a tuple.
+ """
+
+ self.load()
+ if self.pyaccess:
+ return self.pyaccess.getpixel(xy)
+ return self.im.getpixel(tuple(xy))
+
+ def getprojection(self):
+ """
+ Get projection to x and y axes
+
+ :returns: Two sequences, indicating where there are non-zero
+ pixels along the X-axis and the Y-axis, respectively.
+ """
+
+ self.load()
+ x, y = self.im.getprojection()
+ return list(x), list(y)
+
+ def histogram(self, mask=None, extrema=None):
+ """
+ Returns a histogram for the image. The histogram is returned as a
+ list of pixel counts, one for each pixel value in the source
+ image. Counts are grouped into 256 bins for each band, even if
+ the image has more than 8 bits per band. If the image has more
+ than one band, the histograms for all bands are concatenated (for
+ example, the histogram for an "RGB" image contains 768 values).
+
+ A bilevel image (mode "1") is treated as a grayscale ("L") image
+ by this method.
+
+ If a mask is provided, the method returns a histogram for those
+ parts of the image where the mask image is non-zero. The mask
+ image must have the same size as the image, and be either a
+ bi-level image (mode "1") or a grayscale image ("L").
+
+ :param mask: An optional mask.
+ :param extrema: An optional tuple of manually-specified extrema.
+ :returns: A list containing pixel counts.
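+
+        Example (an illustrative sketch)::
+
+            im = Image.new("RGB", (8, 8), "red")
+            h = im.histogram()  # 768 counts; h[255] == 64 for the R band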
+ """
+ self.load()
+ if mask:
+ mask.load()
+ return self.im.histogram((0, 0), mask.im)
+ if self.mode in ("I", "F"):
+ if extrema is None:
+ extrema = self.getextrema()
+ return self.im.histogram(extrema)
+ return self.im.histogram()
+
+ def entropy(self, mask=None, extrema=None):
+ """
+ Calculates and returns the entropy for the image.
+
+ A bilevel image (mode "1") is treated as a grayscale ("L")
+ image by this method.
+
+ If a mask is provided, the method employs the histogram for
+ those parts of the image where the mask image is non-zero.
+ The mask image must have the same size as the image, and be
+ either a bi-level image (mode "1") or a grayscale image ("L").
+
+ :param mask: An optional mask.
+ :param extrema: An optional tuple of manually-specified extrema.
+ :returns: A float value representing the image entropy
+ """
+ self.load()
+ if mask:
+ mask.load()
+ return self.im.entropy((0, 0), mask.im)
+ if self.mode in ("I", "F"):
+ if extrema is None:
+ extrema = self.getextrema()
+ return self.im.entropy(extrema)
+ return self.im.entropy()
+
+ def paste(self, im, box=None, mask=None) -> None:
+ """
+ Pastes another image into this image. The box argument is either
+ a 2-tuple giving the upper left corner, a 4-tuple defining the
+ left, upper, right, and lower pixel coordinate, or None (same as
+ (0, 0)). See :ref:`coordinate-system`. If a 4-tuple is given, the size
+ of the pasted image must match the size of the region.
+
+ If the modes don't match, the pasted image is converted to the mode of
+ this image (see the :py:meth:`~PIL.Image.Image.convert` method for
+ details).
+
+        Instead of an image, the source can be an integer or a tuple
+ containing pixel values. The method then fills the region
+ with the given color. When creating RGB images, you can
+ also use color strings as supported by the ImageColor module.
+
+ If a mask is given, this method updates only the regions
+ indicated by the mask. You can use either "1", "L", "LA", "RGBA"
+ or "RGBa" images (if present, the alpha band is used as mask).
+ Where the mask is 255, the given image is copied as is. Where
+ the mask is 0, the current value is preserved. Intermediate
+ values will mix the two images together, including their alpha
+ channels if they have them.
+
+ See :py:meth:`~PIL.Image.Image.alpha_composite` if you want to
+ combine images with respect to their alpha channels.
+
+ :param im: Source image or pixel value (integer or tuple).
+ :param box: An optional 4-tuple giving the region to paste into.
+ If a 2-tuple is used instead, it's treated as the upper left
+ corner. If omitted or None, the source is pasted into the
+ upper left corner.
+
+ If an image is given as the second argument and there is no
+ third, the box defaults to (0, 0), and the second argument
+ is interpreted as a mask image.
+ :param mask: An optional mask image.
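+
+        Example (an illustrative sketch)::
+
+            canvas = Image.new("RGB", (100, 100), "white")
+            patch = Image.new("RGB", (20, 20), "blue")
+            canvas.paste(patch, (40, 40))  # 2-tuple box: upper left corner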
+ """
+
+ if isImageType(box) and mask is None:
+ # abbreviated paste(im, mask) syntax
+ mask = box
+ box = None
+
+ if box is None:
+ box = (0, 0)
+
+ if len(box) == 2:
+ # upper left corner given; get size from image or mask
+ if isImageType(im):
+ size = im.size
+ elif isImageType(mask):
+ size = mask.size
+ else:
+ # FIXME: use self.size here?
+ msg = "cannot determine region size; use 4-item box"
+ raise ValueError(msg)
+ box += (box[0] + size[0], box[1] + size[1])
+
+ if isinstance(im, str):
+ from . import ImageColor
+
+ im = ImageColor.getcolor(im, self.mode)
+
+ elif isImageType(im):
+ im.load()
+ if self.mode != im.mode:
+ if self.mode != "RGB" or im.mode not in ("LA", "RGBA", "RGBa"):
+ # should use an adapter for this!
+ im = im.convert(self.mode)
+ im = im.im
+
+ self._ensure_mutable()
+
+ if mask:
+ mask.load()
+ self.im.paste(im, box, mask.im)
+ else:
+ self.im.paste(im, box)
+
+ def alpha_composite(self, im, dest=(0, 0), source=(0, 0)):
+ """'In-place' analog of Image.alpha_composite. Composites an image
+ onto this image.
+
+ :param im: image to composite over this one
+ :param dest: Optional 2 tuple (left, top) specifying the upper
+ left corner in this (destination) image.
+ :param source: Optional 2 (left, top) tuple for the upper left
+ corner in the overlay source image, or 4 tuple (left, top, right,
+ bottom) for the bounds of the source rectangle
+
+ Performance Note: Not currently implemented in-place in the core layer.
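+
+        Example (an illustrative sketch)::
+
+            base = Image.new("RGBA", (50, 50), (255, 0, 0, 255))
+            overlay = Image.new("RGBA", (20, 20), (0, 0, 255, 128))
+            base.alpha_composite(overlay, dest=(10, 10))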
+ """
+
+ if not isinstance(source, (list, tuple)):
+ msg = "Source must be a tuple"
+ raise ValueError(msg)
+ if not isinstance(dest, (list, tuple)):
+ msg = "Destination must be a tuple"
+ raise ValueError(msg)
+ if len(source) not in (2, 4):
+ msg = "Source must be a 2 or 4-tuple"
+ raise ValueError(msg)
+ if not len(dest) == 2:
+ msg = "Destination must be a 2-tuple"
+ raise ValueError(msg)
+ if min(source) < 0:
+ msg = "Source must be non-negative"
+ raise ValueError(msg)
+
+ if len(source) == 2:
+ source = source + im.size
+
+ # over image, crop if it's not the whole thing.
+ if source == (0, 0) + im.size:
+ overlay = im
+ else:
+ overlay = im.crop(source)
+
+ # target for the paste
+ box = dest + (dest[0] + overlay.width, dest[1] + overlay.height)
+
+ # destination image. don't copy if we're using the whole image.
+ if box == (0, 0) + self.size:
+ background = self
+ else:
+ background = self.crop(box)
+
+ result = alpha_composite(background, overlay)
+ self.paste(result, box)
+
+ def point(self, lut, mode=None):
+ """
+ Maps this image through a lookup table or function.
+
+ :param lut: A lookup table, containing 256 (or 65536 if
+ self.mode=="I" and mode == "L") values per band in the
+           image. A function can be used instead; it should take a
+ single argument. The function is called once for each
+ possible pixel value, and the resulting table is applied to
+ all bands of the image.
+
+ It may also be an :py:class:`~PIL.Image.ImagePointHandler`
+ object::
+
+ class Example(Image.ImagePointHandler):
+ def point(self, data):
+ # Return result
+ :param mode: Output mode (default is same as input). In the
+ current version, this can only be used if the source image
+ has mode "L" or "P", and the output has mode "1" or the
+ source image mode is "I" and the output mode is "L".
+ :returns: An :py:class:`~PIL.Image.Image` object.
+ """
+
+ self.load()
+
+ if isinstance(lut, ImagePointHandler):
+ return lut.point(self)
+
+ if callable(lut):
+ # if it isn't a list, it should be a function
+ if self.mode in ("I", "I;16", "F"):
+ # check if the function can be used with point_transform
+ # UNDONE wiredfool -- I think this prevents us from ever doing
+ # a gamma function point transform on > 8bit images.
+ scale, offset = _getscaleoffset(lut)
+ return self._new(self.im.point_transform(scale, offset))
+ # for other modes, convert the function to a table
+ lut = [lut(i) for i in range(256)] * self.im.bands
+
+ if self.mode == "F":
+ # FIXME: _imaging returns a confusing error message for this case
+ msg = "point operation not supported for this mode"
+ raise ValueError(msg)
+
+ if mode != "F":
+ lut = [round(i) for i in lut]
+ return self._new(self.im.point(lut, mode))
+
+ def putalpha(self, alpha):
+ """
+ Adds or replaces the alpha layer in this image. If the image
+ does not have an alpha layer, it's converted to "LA" or "RGBA".
+ The new layer must be either "L" or "1".
+
+ :param alpha: The new alpha layer. This can either be an "L" or "1"
+ image having the same size as this image, or an integer or
+ other color value.
+ """
+
+ self._ensure_mutable()
+
+ if self.mode not in ("LA", "PA", "RGBA"):
+ # attempt to promote self to a matching alpha mode
+ try:
+ mode = getmodebase(self.mode) + "A"
+ try:
+ self.im.setmode(mode)
+ except (AttributeError, ValueError) as e:
+ # do things the hard way
+ im = self.im.convert(mode)
+ if im.mode not in ("LA", "PA", "RGBA"):
+ msg = "alpha channel could not be added"
+ raise ValueError(msg) from e # sanity check
+ self.im = im
+ self.pyaccess = None
+ self._mode = self.im.mode
+ except KeyError as e:
+ msg = "illegal image mode"
+ raise ValueError(msg) from e
+
+ if self.mode in ("LA", "PA"):
+ band = 1
+ else:
+ band = 3
+
+ if isImageType(alpha):
+ # alpha layer
+ if alpha.mode not in ("1", "L"):
+ msg = "illegal image mode"
+ raise ValueError(msg)
+ alpha.load()
+ if alpha.mode == "1":
+ alpha = alpha.convert("L")
+ else:
+ # constant alpha
+ try:
+ self.im.fillband(band, alpha)
+ except (AttributeError, ValueError):
+ # do things the hard way
+ alpha = new("L", self.size, alpha)
+ else:
+ return
+
+ self.im.putband(alpha.im, band)
+
+ def putdata(self, data, scale=1.0, offset=0.0):
+ """
+ Copies pixel data from a flattened sequence object into the image. The
+ values should start at the upper left corner (0, 0), continue to the
+ end of the line, followed directly by the first value of the second
+ line, and so on. Data will be read until either the image or the
+ sequence ends. The scale and offset values are used to adjust the
+ sequence values: **pixel = value*scale + offset**.
+
+ :param data: A flattened sequence object.
+ :param scale: An optional scale value. The default is 1.0.
+ :param offset: An optional offset value. The default is 0.0.
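+
+ For example, a minimal sketch that fills a 4x2 grayscale image
+ with two identical gradient rows::
+
+ from PIL import Image
+
+ im = Image.new("L", (4, 2))
+ im.putdata([0, 85, 170, 255] * 2)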
+ """
+
+ self._ensure_mutable()
+
+ self.im.putdata(data, scale, offset)
+
+ def putpalette(self, data, rawmode="RGB"):
+ """
+ Attaches a palette to this image. The image must be a "P", "PA", "L"
+ or "LA" image.
+
+ The palette sequence must contain at most 256 colors, made up of one
+ integer value for each channel in the raw mode.
+ For example, if the raw mode is "RGB", then it can contain at most 768
+ values, made up of red, green and blue values for the corresponding pixel
+ index in the 256 colors.
+ If the raw mode is "RGBA", then it can contain at most 1024 values,
+ containing red, green, blue and alpha values.
+
+ Alternatively, an 8-bit string may be used instead of an integer sequence.
+
+ :param data: A palette sequence (either a list or a string).
+ :param rawmode: The raw mode of the palette. Either "RGB", "RGBA", or a mode
+ that can be transformed to "RGB" or "RGBA" (e.g. "R", "BGR;15", "RGBA;L").
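+
+ For example, a minimal sketch attaching a palette whose first two
+ entries are black and red (unspecified entries default to black)::
+
+ from PIL import Image
+
+ im = Image.new("P", (16, 16))
+ im.putpalette([0, 0, 0, 255, 0, 0]) # index 0 black, index 1 red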
+ """
+ from . import ImagePalette
+
+ if self.mode not in ("L", "LA", "P", "PA"):
+ msg = "illegal image mode"
+ raise ValueError(msg)
+ if isinstance(data, ImagePalette.ImagePalette):
+ palette = ImagePalette.raw(data.rawmode, data.palette)
+ else:
+ if not isinstance(data, bytes):
+ data = bytes(data)
+ palette = ImagePalette.raw(rawmode, data)
+ self._mode = "PA" if "A" in self.mode else "P"
+ self.palette = palette
+ self.palette.mode = "RGB"
+ self.load() # install new palette
+
+ def putpixel(self, xy, value):
+ """
+ Modifies the pixel at the given position. The color is given as
+ a single numerical value for single-band images, and a tuple for
+ multi-band images. In addition to this, RGB and RGBA tuples are
+ accepted for P and PA images.
+
+ Note that this method is relatively slow. For more extensive changes,
+ use :py:meth:`~PIL.Image.Image.paste` or the :py:mod:`~PIL.ImageDraw`
+ module instead.
+
+ See:
+
+ * :py:meth:`~PIL.Image.Image.paste`
+ * :py:meth:`~PIL.Image.Image.putdata`
+ * :py:mod:`~PIL.ImageDraw`
+
+ :param xy: The pixel coordinate, given as (x, y). See
+ :ref:`coordinate-system`.
+ :param value: The pixel value.
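+
+ For example, a minimal sketch setting the top-left pixel of an
+ "RGB" image to red::
+
+ from PIL import Image
+
+ im = Image.new("RGB", (8, 8))
+ im.putpixel((0, 0), (255, 0, 0))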
+ """
+
+ if self.readonly:
+ self._copy()
+ self.load()
+
+ if self.pyaccess:
+ return self.pyaccess.putpixel(xy, value)
+
+ if (
+ self.mode in ("P", "PA")
+ and isinstance(value, (list, tuple))
+ and len(value) in [3, 4]
+ ):
+ # RGB or RGBA value for a P or PA image
+ if self.mode == "PA":
+ alpha = value[3] if len(value) == 4 else 255
+ value = value[:3]
+ value = self.palette.getcolor(value, self)
+ if self.mode == "PA":
+ value = (value, alpha)
+ return self.im.putpixel(xy, value)
+
+ def remap_palette(self, dest_map, source_palette=None):
+ """
+ Rewrites the image to reorder the palette.
+
+ :param dest_map: A list of indexes into the original palette.
+ e.g. ``[1,0]`` would swap a two item palette, and ``list(range(256))``
+ is the identity transform.
+ :param source_palette: Bytes or None.
+ :returns: An :py:class:`~PIL.Image.Image` object.
+
+ """
+ from . import ImagePalette
+
+ if self.mode not in ("L", "P"):
+ msg = "illegal image mode"
+ raise ValueError(msg)
+
+ bands = 3
+ palette_mode = "RGB"
+ if source_palette is None:
+ if self.mode == "P":
+ self.load()
+ palette_mode = self.im.getpalettemode()
+ if palette_mode == "RGBA":
+ bands = 4
+ source_palette = self.im.getpalette(palette_mode, palette_mode)
+ else: # L-mode
+ source_palette = bytearray(i // 3 for i in range(768))
+
+ palette_bytes = b""
+ new_positions = [0] * 256
+
+ # pick only the used colors from the palette
+ for i, oldPosition in enumerate(dest_map):
+ palette_bytes += source_palette[
+ oldPosition * bands : oldPosition * bands + bands
+ ]
+ new_positions[oldPosition] = i
+
+ # replace the palette color id of all pixel with the new id
+
+ # Palette images are [0..255], mapped through a 1 or 3
+ # byte/color map. We need to remap the whole image
+ # from palette 1 to palette 2. New_positions is
+ # an array of indexes into palette 1. Palette 2 is
+ # palette 1 with any holes removed.
+
+ # We're going to leverage the convert mechanism to use the
+ # C code to remap the image from palette 1 to palette 2,
+ # by forcing the source image into 'L' mode and adding a
+ # mapping 'L' mode palette, then converting back to 'L'
+ # sans palette thus converting the image bytes, then
+ # assigning the optimized RGB palette.
+
+ # perf reference, 9500x4000 gif, w/~135 colors
+ # 14 sec prepatch, 1 sec postpatch with optimization forced.
+
+ mapping_palette = bytearray(new_positions)
+
+ m_im = self.copy()
+ m_im._mode = "P"
+
+ m_im.palette = ImagePalette.ImagePalette(
+ palette_mode, palette=mapping_palette * bands
+ )
+ # possibly set palette dirty, then
+ # m_im.putpalette(mapping_palette, 'L') # converts to 'P'
+ # or just force it.
+ # UNDONE -- this is part of the general issue with palettes
+ m_im.im.putpalette(palette_mode + ";L", m_im.palette.tobytes())
+
+ m_im = m_im.convert("L")
+
+ m_im.putpalette(palette_bytes, palette_mode)
+ m_im.palette = ImagePalette.ImagePalette(palette_mode, palette=palette_bytes)
+
+ if "transparency" in self.info:
+ try:
+ m_im.info["transparency"] = dest_map.index(self.info["transparency"])
+ except ValueError:
+ if "transparency" in m_im.info:
+ del m_im.info["transparency"]
+
+ return m_im
+
+ def _get_safe_box(self, size, resample, box):
+ """Expands the box so it includes adjacent pixels
+ that may be used by resampling with the given resampling filter.
+ """
+ filter_support = _filters_support[resample] - 0.5
+ scale_x = (box[2] - box[0]) / size[0]
+ scale_y = (box[3] - box[1]) / size[1]
+ support_x = filter_support * scale_x
+ support_y = filter_support * scale_y
+
+ return (
+ max(0, int(box[0] - support_x)),
+ max(0, int(box[1] - support_y)),
+ min(self.size[0], math.ceil(box[2] + support_x)),
+ min(self.size[1], math.ceil(box[3] + support_y)),
+ )
+
+ def resize(self, size, resample=None, box=None, reducing_gap=None):
+ """
+ Returns a resized copy of this image.
+
+ :param size: The requested size in pixels, as a 2-tuple:
+ (width, height).
+ :param resample: An optional resampling filter. This can be
+ one of :py:data:`Resampling.NEAREST`, :py:data:`Resampling.BOX`,
+ :py:data:`Resampling.BILINEAR`, :py:data:`Resampling.HAMMING`,
+ :py:data:`Resampling.BICUBIC` or :py:data:`Resampling.LANCZOS`.
+ If the image has mode "1" or "P", it is always set to
+ :py:data:`Resampling.NEAREST`. If the image mode specifies a number
+ of bits, such as "I;16", then the default filter is
+ :py:data:`Resampling.NEAREST`. Otherwise, the default filter is
+ :py:data:`Resampling.BICUBIC`. See: :ref:`concept-filters`.
+ :param box: An optional 4-tuple of floats providing
+ the source image region to be scaled.
+ The values must be within (0, 0, width, height) rectangle.
+ If omitted or None, the entire source is used.
+ :param reducing_gap: Apply optimization by resizing the image
+ in two steps. First, reducing the image by an integer factor
+ using :py:meth:`~PIL.Image.Image.reduce`.
+ Second, resizing using regular resampling. The last step
+ changes the size by no less than a factor of ``reducing_gap``.
+ ``reducing_gap`` may be None (no first step is performed)
+ or should be greater than 1.0. The larger ``reducing_gap`` is,
+ the closer the result is to fair resampling; the smaller it is,
+ the faster the resize. With ``reducing_gap`` greater than or
+ equal to 3.0, the result is indistinguishable from fair
+ resampling in most cases.
+ The default value is None (no optimization).
+ :returns: An :py:class:`~PIL.Image.Image` object.
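+
+ For example, a minimal sketch that scales an image down to 128x128
+ with the :py:data:`Resampling.LANCZOS` filter (``"hopper.png"`` is
+ a placeholder filename)::
+
+ from PIL import Image
+
+ with Image.open("hopper.png") as im:
+ small = im.resize((128, 128), Image.Resampling.LANCZOS)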
+ """
+
+ if resample is None:
+ type_special = ";" in self.mode
+ resample = Resampling.NEAREST if type_special else Resampling.BICUBIC
+ elif resample not in (
+ Resampling.NEAREST,
+ Resampling.BILINEAR,
+ Resampling.BICUBIC,
+ Resampling.LANCZOS,
+ Resampling.BOX,
+ Resampling.HAMMING,
+ ):
+ msg = f"Unknown resampling filter ({resample})."
+
+ filters = [
+ f"{filter[1]} ({filter[0]})"
+ for filter in (
+ (Resampling.NEAREST, "Image.Resampling.NEAREST"),
+ (Resampling.LANCZOS, "Image.Resampling.LANCZOS"),
+ (Resampling.BILINEAR, "Image.Resampling.BILINEAR"),
+ (Resampling.BICUBIC, "Image.Resampling.BICUBIC"),
+ (Resampling.BOX, "Image.Resampling.BOX"),
+ (Resampling.HAMMING, "Image.Resampling.HAMMING"),
+ )
+ ]
+ msg += " Use " + ", ".join(filters[:-1]) + " or " + filters[-1]
+ raise ValueError(msg)
+
+ if reducing_gap is not None and reducing_gap < 1.0:
+ msg = "reducing_gap must be 1.0 or greater"
+ raise ValueError(msg)
+
+ size = tuple(size)
+
+ self.load()
+ if box is None:
+ box = (0, 0) + self.size
+ else:
+ box = tuple(box)
+
+ if self.size == size and box == (0, 0) + self.size:
+ return self.copy()
+
+ if self.mode in ("1", "P"):
+ resample = Resampling.NEAREST
+
+ if self.mode in ["LA", "RGBA"] and resample != Resampling.NEAREST:
+ im = self.convert({"LA": "La", "RGBA": "RGBa"}[self.mode])
+ im = im.resize(size, resample, box)
+ return im.convert(self.mode)
+
+ self.load()
+
+ if reducing_gap is not None and resample != Resampling.NEAREST:
+ factor_x = int((box[2] - box[0]) / size[0] / reducing_gap) or 1
+ factor_y = int((box[3] - box[1]) / size[1] / reducing_gap) or 1
+ if factor_x > 1 or factor_y > 1:
+ reduce_box = self._get_safe_box(size, resample, box)
+ factor = (factor_x, factor_y)
+ if callable(self.reduce):
+ self = self.reduce(factor, box=reduce_box)
+ else:
+ self = Image.reduce(self, factor, box=reduce_box)
+ box = (
+ (box[0] - reduce_box[0]) / factor_x,
+ (box[1] - reduce_box[1]) / factor_y,
+ (box[2] - reduce_box[0]) / factor_x,
+ (box[3] - reduce_box[1]) / factor_y,
+ )
+
+ return self._new(self.im.resize(size, resample, box))
+
+ def reduce(self, factor, box=None):
+ """
+ Returns a copy of the image reduced ``factor`` times.
+ If the size of the image is not divisible by ``factor``,
+ the resulting size will be rounded up.
+
+ :param factor: An integer greater than 0, or a tuple of two such
+ integers (one for the width and one for the height).
+ :param box: An optional 4-tuple of ints providing
+ the source image region to be reduced.
+ The values must be within ``(0, 0, width, height)`` rectangle.
+ If omitted or ``None``, the entire source is used.
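+
+ For example, a minimal sketch halving both dimensions
+ (``"hopper.png"`` is a placeholder filename)::
+
+ from PIL import Image
+
+ with Image.open("hopper.png") as im:
+ out = im.reduce(2)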
+ """
+ if not isinstance(factor, (list, tuple)):
+ factor = (factor, factor)
+
+ if box is None:
+ box = (0, 0) + self.size
+ else:
+ box = tuple(box)
+
+ if factor == (1, 1) and box == (0, 0) + self.size:
+ return self.copy()
+
+ if self.mode in ["LA", "RGBA"]:
+ im = self.convert({"LA": "La", "RGBA": "RGBa"}[self.mode])
+ im = im.reduce(factor, box)
+ return im.convert(self.mode)
+
+ self.load()
+
+ return self._new(self.im.reduce(factor, box))
+
+ def rotate(
+ self,
+ angle,
+ resample=Resampling.NEAREST,
+ expand=0,
+ center=None,
+ translate=None,
+ fillcolor=None,
+ ):
+ """
+ Returns a rotated copy of this image. This method returns a
+ copy of this image, rotated the given number of degrees counter
+ clockwise around its centre.
+
+ :param angle: In degrees counter clockwise.
+ :param resample: An optional resampling filter. This can be
+ one of :py:data:`Resampling.NEAREST` (use nearest neighbour),
+ :py:data:`Resampling.BILINEAR` (linear interpolation in a 2x2
+ environment), or :py:data:`Resampling.BICUBIC` (cubic spline
+ interpolation in a 4x4 environment). If omitted, or if the image has
+ mode "1" or "P", it is set to :py:data:`Resampling.NEAREST`.
+ See :ref:`concept-filters`.
+ :param expand: Optional expansion flag. If true, expands the output
+ image to make it large enough to hold the entire rotated image.
+ If false or omitted, make the output image the same size as the
+ input image. Note that the expand flag assumes rotation around
+ the center and no translation.
+ :param center: Optional center of rotation (a 2-tuple). Origin is
+ the upper left corner. Default is the center of the image.
+ :param translate: An optional post-rotate translation (a 2-tuple).
+ :param fillcolor: An optional color for area outside the rotated image.
+ :returns: An :py:class:`~PIL.Image.Image` object.
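+
+ For example, a minimal sketch rotating an image 45 degrees,
+ expanding the canvas and filling the exposed corners with white
+ (``"hopper.png"`` is a placeholder filename)::
+
+ from PIL import Image
+
+ with Image.open("hopper.png") as im:
+ rotated = im.rotate(45, expand=True, fillcolor="white")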
+ """
+
+ angle = angle % 360.0
+
+ # Fast paths regardless of filter, as long as we're not
+ # translating or changing the center.
+ if not (center or translate):
+ if angle == 0:
+ return self.copy()
+ if angle == 180:
+ return self.transpose(Transpose.ROTATE_180)
+ if angle in (90, 270) and (expand or self.width == self.height):
+ return self.transpose(
+ Transpose.ROTATE_90 if angle == 90 else Transpose.ROTATE_270
+ )
+
+ # Calculate the affine matrix. Note that this is the reverse
+ # transformation (from destination image to source) because we
+ # want to interpolate the (discrete) destination pixel from
+ # the local area around the (floating) source pixel.
+
+ # The matrix we actually want (note that it operates from the right):
+ # (1, 0, tx) (1, 0, cx) ( cos a, sin a, 0) (1, 0, -cx)
+ # (0, 1, ty) * (0, 1, cy) * (-sin a, cos a, 0) * (0, 1, -cy)
+ # (0, 0, 1) (0, 0, 1) ( 0, 0, 1) (0, 0, 1)
+
+ # The reverse matrix is thus:
+ # (1, 0, cx) ( cos -a, sin -a, 0) (1, 0, -cx) (1, 0, -tx)
+ # (0, 1, cy) * (-sin -a, cos -a, 0) * (0, 1, -cy) * (0, 1, -ty)
+ # (0, 0, 1) ( 0, 0, 1) (0, 0, 1) (0, 0, 1)
+
+ # In any case, the final translation may be updated at the end to
+ # compensate for the expand flag.
+
+ w, h = self.size
+
+ if translate is None:
+ post_trans = (0, 0)
+ else:
+ post_trans = translate
+ if center is None:
+ # FIXME These should be rounded to ints?
+ rotn_center = (w / 2.0, h / 2.0)
+ else:
+ rotn_center = center
+
+ angle = -math.radians(angle)
+ matrix = [
+ round(math.cos(angle), 15),
+ round(math.sin(angle), 15),
+ 0.0,
+ round(-math.sin(angle), 15),
+ round(math.cos(angle), 15),
+ 0.0,
+ ]
+
+ def transform(x, y, matrix):
+ (a, b, c, d, e, f) = matrix
+ return a * x + b * y + c, d * x + e * y + f
+
+ matrix[2], matrix[5] = transform(
+ -rotn_center[0] - post_trans[0], -rotn_center[1] - post_trans[1], matrix
+ )
+ matrix[2] += rotn_center[0]
+ matrix[5] += rotn_center[1]
+
+ if expand:
+ # calculate output size
+ xx = []
+ yy = []
+ for x, y in ((0, 0), (w, 0), (w, h), (0, h)):
+ x, y = transform(x, y, matrix)
+ xx.append(x)
+ yy.append(y)
+ nw = math.ceil(max(xx)) - math.floor(min(xx))
+ nh = math.ceil(max(yy)) - math.floor(min(yy))
+
+ # We multiply a translation matrix from the right. Because of its
+ # special form, this is the same as taking the image of the
+ # translation vector as new translation vector.
+ matrix[2], matrix[5] = transform(-(nw - w) / 2.0, -(nh - h) / 2.0, matrix)
+ w, h = nw, nh
+
+ return self.transform(
+ (w, h), Transform.AFFINE, matrix, resample, fillcolor=fillcolor
+ )
+
+ def save(self, fp, format=None, **params) -> None:
+ """
+ Saves this image under the given filename. If no format is
+ specified, the format to use is determined from the filename
+ extension, if possible.
+
+ Keyword options can be used to provide additional instructions
+ to the writer. If a writer doesn't recognise an option, it is
+ silently ignored. The available options are described in the
+ :doc:`image format documentation
+ <../handbook/image-file-formats>` for each writer.
+
+ You can use a file object instead of a filename. In this case,
+ you must always specify the format. The file object must
+ implement the ``seek``, ``tell``, and ``write``
+ methods, and be opened in binary mode.
+
+ :param fp: A filename (string), pathlib.Path object or file object.
+ :param format: Optional format override. If omitted, the
+ format to use is determined from the filename extension.
+ If a file object was used instead of a filename, this
+ parameter should always be used.
+ :param params: Extra parameters to the image writer.
+ :returns: None
+ :exception ValueError: If the output format could not be determined
+ from the file name. Use the format option to solve this.
+ :exception OSError: If the file could not be written. The file
+ may have been created, and may contain partial data.
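+
+ For example, a minimal sketch saving to an in-memory file object,
+ where the format must be given explicitly::
+
+ import io
+
+ from PIL import Image
+
+ im = Image.new("RGB", (16, 16), "red")
+ buffer = io.BytesIO()
+ im.save(buffer, format="PNG")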
+ """
+
+ filename = ""
+ open_fp = False
+ if isinstance(fp, Path):
+ filename = str(fp)
+ open_fp = True
+ elif is_path(fp):
+ filename = fp
+ open_fp = True
+ elif fp == sys.stdout:
+ try:
+ fp = sys.stdout.buffer
+ except AttributeError:
+ pass
+ if not filename and hasattr(fp, "name") and is_path(fp.name):
+ # only set the name for metadata purposes
+ filename = fp.name
+
+ # may mutate self!
+ self._ensure_mutable()
+
+ save_all = params.pop("save_all", False)
+ self.encoderinfo = params
+ self.encoderconfig = ()
+
+ preinit()
+
+ ext = os.path.splitext(filename)[1].lower()
+
+ if not format:
+ if ext not in EXTENSION:
+ init()
+ try:
+ format = EXTENSION[ext]
+ except KeyError as e:
+ msg = f"unknown file extension: {ext}"
+ raise ValueError(msg) from e
+
+ if format.upper() not in SAVE:
+ init()
+ if save_all:
+ save_handler = SAVE_ALL[format.upper()]
+ else:
+ save_handler = SAVE[format.upper()]
+
+ created = False
+ if open_fp:
+ created = not os.path.exists(filename)
+ if params.get("append", False):
+ # Open also for reading ("+"), because TIFF save_all
+ # writer needs to go back and edit the written data.
+ fp = builtins.open(filename, "r+b")
+ else:
+ fp = builtins.open(filename, "w+b")
+
+ try:
+ save_handler(self, fp, filename)
+ except Exception:
+ if open_fp:
+ fp.close()
+ if created:
+ try:
+ os.remove(filename)
+ except PermissionError:
+ pass
+ raise
+ if open_fp:
+ fp.close()
+
+ def seek(self, frame) -> Image:
+ """
+ Seeks to the given frame in this sequence file. If you seek
+ beyond the end of the sequence, the method raises an
+ ``EOFError`` exception. When a sequence file is opened, the
+ library automatically seeks to frame 0.
+
+ See :py:meth:`~PIL.Image.Image.tell`.
+
+ If defined, :attr:`~PIL.Image.Image.n_frames` refers to the
+ number of available frames.
+
+ :param frame: Frame number, starting at 0.
+ :exception EOFError: If the call attempts to seek beyond the end
+ of the sequence.
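+
+ For example, a minimal sketch stepping to the second frame of a
+ multiframe file (``"animation.gif"`` is a placeholder filename)::
+
+ from PIL import Image
+
+ with Image.open("animation.gif") as im:
+ im.seek(1) # frames are numbered from 0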
+ """
+
+ # overridden by file handlers
+ if frame != 0:
+ msg = "no more images in file"
+ raise EOFError(msg)
+
+ def show(self, title=None):
+ """
+ Displays this image. This method is mainly intended for debugging purposes.
+
+ This method calls :py:func:`PIL.ImageShow.show` internally. You can use
+ :py:func:`PIL.ImageShow.register` to override its default behaviour.
+
+ The image is first saved to a temporary file. By default, it will be in
+ PNG format.
+
+ On Unix, the image is then opened using the **xdg-open**, **display**,
+ **gm**, **eog** or **xv** utility, depending on which one can be found.
+
+ On macOS, the image is opened with the native Preview application.
+
+ On Windows, the image is opened with the standard PNG display utility.
+
+ :param title: Optional title to use for the image window, where possible.
+ """
+
+ _show(self, title=title)
+
+ def split(self):
+ """
+ Split this image into individual bands. This method returns a
+ tuple of individual image bands from an image. For example,
+ splitting an "RGB" image creates three new images each
+ containing a copy of one of the original bands (red, green,
+ blue).
+
+ If you need only one band, the :py:meth:`~PIL.Image.Image.getchannel`
+ method can be more convenient and faster.
+
+ :returns: A tuple containing bands.
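+
+ For example, a minimal sketch swapping the red and blue bands of
+ an "RGB" image (``"hopper.png"`` is a placeholder filename)::
+
+ from PIL import Image
+
+ with Image.open("hopper.png") as im:
+ r, g, b = im.convert("RGB").split()
+ swapped = Image.merge("RGB", (b, g, r))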
+ """
+
+ self.load()
+ if self.im.bands == 1:
+ ims = [self.copy()]
+ else:
+ ims = map(self._new, self.im.split())
+ return tuple(ims)
+
+ def getchannel(self, channel):
+ """
+ Returns an image containing a single channel of the source image.
+
+ :param channel: What channel to return. Could be index
+ (0 for "R" channel of "RGB") or channel name
+ ("A" for alpha channel of "RGBA").
+ :returns: An image in "L" mode.
+
+ .. versionadded:: 4.3.0
+ """
+ self.load()
+
+ if isinstance(channel, str):
+ try:
+ channel = self.getbands().index(channel)
+ except ValueError as e:
+ msg = f'The image has no channel "{channel}"'
+ raise ValueError(msg) from e
+
+ return self._new(self.im.getband(channel))
+
+ def tell(self) -> int:
+ """
+ Returns the current frame number. See :py:meth:`~PIL.Image.Image.seek`.
+
+ If defined, :attr:`~PIL.Image.Image.n_frames` refers to the
+ number of available frames.
+
+ :returns: Frame number, starting with 0.
+ """
+ return 0
+
+ def thumbnail(self, size, resample=Resampling.BICUBIC, reducing_gap=2.0):
+ """
+ Make this image into a thumbnail. This method modifies the
+ image to contain a thumbnail version of itself, no larger than
+ the given size. This method calculates an appropriate thumbnail
+ size to preserve the aspect ratio of the image, calls the
+ :py:meth:`~PIL.Image.Image.draft` method to configure the file reader
+ (where applicable), and finally resizes the image.
+
+ Note that this function modifies the :py:class:`~PIL.Image.Image`
+ object in place. If you need to use the full resolution image as well,
+ apply this method to a :py:meth:`~PIL.Image.Image.copy` of the original
+ image.
+
+ :param size: The requested size in pixels, as a 2-tuple:
+ (width, height).
+ :param resample: Optional resampling filter. This can be one
+ of :py:data:`Resampling.NEAREST`, :py:data:`Resampling.BOX`,
+ :py:data:`Resampling.BILINEAR`, :py:data:`Resampling.HAMMING`,
+ :py:data:`Resampling.BICUBIC` or :py:data:`Resampling.LANCZOS`.
+ If omitted, it defaults to :py:data:`Resampling.BICUBIC`.
+ (was :py:data:`Resampling.NEAREST` prior to version 2.5.0).
+ See: :ref:`concept-filters`.
+ :param reducing_gap: Apply optimization by resizing the image
+ in two steps. First, reducing the image by an integer factor
+ using :py:meth:`~PIL.Image.Image.reduce` or
+ :py:meth:`~PIL.Image.Image.draft` for JPEG images.
+ Second, resizing using regular resampling. The last step
+ changes the size by no less than a factor of ``reducing_gap``.
+ ``reducing_gap`` may be None (no first step is performed)
+ or should be greater than 1.0. The larger ``reducing_gap`` is,
+ the closer the result is to fair resampling; the smaller it is,
+ the faster the resize. With ``reducing_gap`` greater than or
+ equal to 3.0, the result is indistinguishable from fair
+ resampling in most cases.
+ The default value is 2.0 (very close to fair resampling
+ while still being faster in many cases).
+ :returns: None
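+
+ For example, a minimal sketch shrinking a copy of an image so that
+ it fits within 128x128 pixels (``"hopper.png"`` is a placeholder
+ filename)::
+
+ from PIL import Image
+
+ with Image.open("hopper.png") as im:
+ thumb = im.copy() # keep the original at full resolution
+ thumb.thumbnail((128, 128))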
+ """
+
+ provided_size = tuple(map(math.floor, size))
+
+ def preserve_aspect_ratio():
+ def round_aspect(number, key):
+ return max(min(math.floor(number), math.ceil(number), key=key), 1)
+
+ x, y = provided_size
+ if x >= self.width and y >= self.height:
+ return
+
+ aspect = self.width / self.height
+ if x / y >= aspect:
+ x = round_aspect(y * aspect, key=lambda n: abs(aspect - n / y))
+ else:
+ y = round_aspect(
+ x / aspect, key=lambda n: 0 if n == 0 else abs(aspect - x / n)
+ )
+ return x, y
+
+ box = None
+ if reducing_gap is not None:
+ size = preserve_aspect_ratio()
+ if size is None:
+ return
+
+ res = self.draft(None, (size[0] * reducing_gap, size[1] * reducing_gap))
+ if res is not None:
+ box = res[1]
+ if box is None:
+ self.load()
+
+ # load() may have changed the size of the image
+ size = preserve_aspect_ratio()
+ if size is None:
+ return
+
+ if self.size != size:
+ im = self.resize(size, resample, box=box, reducing_gap=reducing_gap)
+
+ self.im = im.im
+ self._size = size
+ self._mode = self.im.mode
+
+ self.readonly = 0
+ self.pyaccess = None
+
+ # FIXME: the different transform methods need further explanation
+ # instead of bloating the method docs, add a separate chapter.
+ def transform(
+ self,
+ size,
+ method,
+ data=None,
+ resample=Resampling.NEAREST,
+ fill=1,
+ fillcolor=None,
+ ) -> Image:
+ """
+ Transforms this image. This method creates a new image with the
+ given size, and the same mode as the original, and copies data
+ to the new image using the given transform.
+
+ :param size: The output size in pixels, as a 2-tuple:
+ (width, height).
+ :param method: The transformation method. This is one of
+ :py:data:`Transform.EXTENT` (cut out a rectangular subregion),
+ :py:data:`Transform.AFFINE` (affine transform),
+ :py:data:`Transform.PERSPECTIVE` (perspective transform),
+ :py:data:`Transform.QUAD` (map a quadrilateral to a rectangle), or
+ :py:data:`Transform.MESH` (map a number of source quadrilaterals
+ in one operation).
+
+ It may also be an :py:class:`~PIL.Image.ImageTransformHandler`
+ object::
+
+ class Example(Image.ImageTransformHandler):
+ def transform(self, size, data, resample, fill=1):
+ # Return result
+
+ It may also be an object with a ``method.getdata`` method
+ that returns a tuple supplying new ``method`` and ``data`` values::
+
+ class Example:
+ def getdata(self):
+ method = Image.Transform.EXTENT
+ data = (0, 0, 100, 100)
+ return method, data
+
+ :param data: Extra data to the transformation method.
+ :param resample: Optional resampling filter. It can be one of
+ :py:data:`Resampling.NEAREST` (use nearest neighbour),
+ :py:data:`Resampling.BILINEAR` (linear interpolation in a 2x2
+ environment), or :py:data:`Resampling.BICUBIC` (cubic spline
+ interpolation in a 4x4 environment). If omitted, or if the image
+ has mode "1" or "P", it is set to :py:data:`Resampling.NEAREST`.
+ See: :ref:`concept-filters`.
+ :param fill: If ``method`` is an
+ :py:class:`~PIL.Image.ImageTransformHandler` object, this is one of
+ the arguments passed to it. Otherwise, it is unused.
+ :param fillcolor: Optional fill color for the area outside the
+ transform in the output image.
+ :returns: An :py:class:`~PIL.Image.Image` object.
+ """
+
+ if self.mode in ("LA", "RGBA") and resample != Resampling.NEAREST:
+ return (
+ self.convert({"LA": "La", "RGBA": "RGBa"}[self.mode])
+ .transform(size, method, data, resample, fill, fillcolor)
+ .convert(self.mode)
+ )
+
+ if isinstance(method, ImageTransformHandler):
+ return method.transform(size, self, resample=resample, fill=fill)
+
+ if hasattr(method, "getdata"):
+ # compatibility w. old-style transform objects
+ method, data = method.getdata()
+
+ if data is None:
+ msg = "missing method data"
+ raise ValueError(msg)
+
+ im = new(self.mode, size, fillcolor)
+ if self.mode == "P" and self.palette:
+ im.palette = self.palette.copy()
+ im.info = self.info.copy()
+ if method == Transform.MESH:
+ # list of quads
+ for box, quad in data:
+ im.__transformer(
+ box, self, Transform.QUAD, quad, resample, fillcolor is None
+ )
+ else:
+ im.__transformer(
+ (0, 0) + size, self, method, data, resample, fillcolor is None
+ )
+
+ return im
+
+ def __transformer(
+ self, box, image, method, data, resample=Resampling.NEAREST, fill=1
+ ):
+ w = box[2] - box[0]
+ h = box[3] - box[1]
+
+ if method == Transform.AFFINE:
+ data = data[:6]
+
+ elif method == Transform.EXTENT:
+ # convert extent to an affine transform
+ x0, y0, x1, y1 = data
+ xs = (x1 - x0) / w
+ ys = (y1 - y0) / h
+ method = Transform.AFFINE
+ data = (xs, 0, x0, 0, ys, y0)
+
+ elif method == Transform.PERSPECTIVE:
+ data = data[:8]
+
+ elif method == Transform.QUAD:
+ # quadrilateral warp. data specifies the four corners
+ # given as NW, SW, SE, and NE.
+ nw = data[:2]
+ sw = data[2:4]
+ se = data[4:6]
+ ne = data[6:8]
+ x0, y0 = nw
+ As = 1.0 / w
+ At = 1.0 / h
+ data = (
+ x0,
+ (ne[0] - x0) * As,
+ (sw[0] - x0) * At,
+ (se[0] - sw[0] - ne[0] + x0) * As * At,
+ y0,
+ (ne[1] - y0) * As,
+ (sw[1] - y0) * At,
+ (se[1] - sw[1] - ne[1] + y0) * As * At,
+ )
+
+ else:
+ msg = "unknown transformation method"
+ raise ValueError(msg)
+
+ if resample not in (
+ Resampling.NEAREST,
+ Resampling.BILINEAR,
+ Resampling.BICUBIC,
+ ):
+ if resample in (Resampling.BOX, Resampling.HAMMING, Resampling.LANCZOS):
+ msg = {
+ Resampling.BOX: "Image.Resampling.BOX",
+ Resampling.HAMMING: "Image.Resampling.HAMMING",
+ Resampling.LANCZOS: "Image.Resampling.LANCZOS",
+ }[resample] + f" ({resample}) cannot be used."
+ else:
+ msg = f"Unknown resampling filter ({resample})."
+
+ filters = [
+ f"{filter[1]} ({filter[0]})"
+ for filter in (
+ (Resampling.NEAREST, "Image.Resampling.NEAREST"),
+ (Resampling.BILINEAR, "Image.Resampling.BILINEAR"),
+ (Resampling.BICUBIC, "Image.Resampling.BICUBIC"),
+ )
+ ]
+ msg += " Use " + ", ".join(filters[:-1]) + " or " + filters[-1]
+ raise ValueError(msg)
+
+ image.load()
+
+ self.load()
+
+ if image.mode in ("1", "P"):
+ resample = Resampling.NEAREST
+
+ self.im.transform2(box, image.im, method, data, resample, fill)
+
+ def transpose(self, method):
+ """
+ Transpose image (flip or rotate in 90 degree steps)
+
+ :param method: One of :py:data:`Transpose.FLIP_LEFT_RIGHT`,
+ :py:data:`Transpose.FLIP_TOP_BOTTOM`, :py:data:`Transpose.ROTATE_90`,
+ :py:data:`Transpose.ROTATE_180`, :py:data:`Transpose.ROTATE_270`,
+ :py:data:`Transpose.TRANSPOSE` or :py:data:`Transpose.TRANSVERSE`.
+ :returns: Returns a flipped or rotated copy of this image.
+ """
+
+ self.load()
+ return self._new(self.im.transpose(method))
+
+ def effect_spread(self, distance):
+ """
+ Randomly spread pixels in an image.
+
+ :param distance: Distance to spread pixels.
+ """
+ self.load()
+ return self._new(self.im.effect_spread(distance))
+
+ def toqimage(self):
+ """Returns a QImage copy of this image"""
+ from . import ImageQt
+
+ if not ImageQt.qt_is_installed:
+ msg = "Qt bindings are not installed"
+ raise ImportError(msg)
+ return ImageQt.toqimage(self)
+
+ def toqpixmap(self):
+ """Returns a QPixmap copy of this image"""
+ from . import ImageQt
+
+ if not ImageQt.qt_is_installed:
+ msg = "Qt bindings are not installed"
+ raise ImportError(msg)
+ return ImageQt.toqpixmap(self)
+
+
+# --------------------------------------------------------------------
+# Abstract handlers.
+
+
+class ImagePointHandler:
+ """
+ Used as a mixin by point transforms
+ (for use with :py:meth:`~PIL.Image.Image.point`)
+ """
+
+ pass
+
+
+class ImageTransformHandler:
+ """
+ Used as a mixin by geometry transforms
+ (for use with :py:meth:`~PIL.Image.Image.transform`)
+ """
+
+ pass
+
+
+# --------------------------------------------------------------------
+# Factories
+
+#
+# Debugging
+
+
+def _wedge():
+ """Create grayscale wedge (for debugging only)"""
+
+ return Image()._new(core.wedge("L"))
+
+
+def _check_size(size):
+ """
+ Common check to enforce type and sanity check on size tuples
+
+ :param size: Should be a 2 tuple of (width, height)
+ :returns: True, or raises a ValueError
+ """
+
+ if not isinstance(size, (list, tuple)):
+ msg = "Size must be a tuple"
+ raise ValueError(msg)
+ if len(size) != 2:
+ msg = "Size must be a tuple of length 2"
+ raise ValueError(msg)
+ if size[0] < 0 or size[1] < 0:
+ msg = "Width and height must be >= 0"
+ raise ValueError(msg)
+
+ return True
+
+
+def new(mode, size, color=0) -> Image:
+ """
+ Creates a new image with the given mode and size.
+
+ :param mode: The mode to use for the new image. See:
+ :ref:`concept-modes`.
+ :param size: A 2-tuple, containing (width, height) in pixels.
+ :param color: What color to use for the image. Default is black.
+ If given, this should be a single integer or floating point value
+ for single-band modes, and a tuple for multi-band modes (one value
+ per band). When creating RGB or HSV images, you can also use color
+ strings as supported by the ImageColor module. If the color is
+ None, the image is not initialised.
+ :returns: An :py:class:`~PIL.Image.Image` object.
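+
+ For example, a minimal sketch creating a solid red image from a
+ CSS3-style color string::
+
+ from PIL import Image
+
+ im = Image.new("RGB", (64, 64), "red")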
+ """
+
+ _check_size(size)
+
+ if color is None:
+ # don't initialize
+ return Image()._new(core.new(mode, size))
+
+ if isinstance(color, str):
+ # css3-style specifier
+
+ from . import ImageColor
+
+ color = ImageColor.getcolor(color, mode)
+
+ im = Image()
+ if mode == "P" and isinstance(color, (list, tuple)) and len(color) in [3, 4]:
+ # RGB or RGBA value for a P image
+ from . import ImagePalette
+
+ im.palette = ImagePalette.ImagePalette()
+ color = im.palette.getcolor(color)
+ return im._new(core.fill(mode, size, color))
+
+
+def frombytes(mode, size, data, decoder_name="raw", *args) -> Image:
+ """
+ Creates a copy of an image memory from pixel data in a buffer.
+
+ In its simplest form, this function takes three arguments
+ (mode, size, and unpacked pixel data).
+
+ You can also use any pixel decoder supported by PIL. For more
+ information on available decoders, see the section
+ :ref:`Writing Your Own File Codec <file-codecs>`.
+
+ Note that this function decodes pixel data only, not entire images.
+ If you have an entire image in a string, wrap it in a
+ :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load
+ it.
+
+ :param mode: The image mode. See: :ref:`concept-modes`.
+ :param size: The image size.
+ :param data: A byte buffer containing raw data for the given mode.
+ :param decoder_name: What decoder to use.
+ :param args: Additional parameters for the given decoder.
+ :returns: An :py:class:`~PIL.Image.Image` object.
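+
+ For example, a minimal sketch building a 2x2 grayscale image from
+ four raw bytes::
+
+ from PIL import Image
+
+ im = Image.frombytes("L", (2, 2), bytes([0, 85, 170, 255]))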
+ """
+
+ _check_size(size)
+
+ im = new(mode, size)
+ if im.width != 0 and im.height != 0:
+ # may pass tuple instead of argument list
+ if len(args) == 1 and isinstance(args[0], tuple):
+ args = args[0]
+
+ if decoder_name == "raw" and args == ():
+ args = mode
+
+ im.frombytes(data, decoder_name, args)
+ return im
+
+
+def frombuffer(mode, size, data, decoder_name="raw", *args):
+ """
+ Creates an image memory referencing pixel data in a byte buffer.
+
+ This function is similar to :py:func:`~PIL.Image.frombytes`, but uses data
+ in the byte buffer, where possible. This means that changes to the
+ original buffer object are reflected in this image. Not all modes can
+ share memory; supported modes include "L", "RGBX", "RGBA", and "CMYK".
+
+ Note that this function decodes pixel data only, not entire images.
+ If you have an entire image file in a string, wrap it in a
+ :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load it.
+
+ In the current version, the default parameters used for the "raw" decoder
+ differ from those used for :py:func:`~PIL.Image.frombytes`. This is a
+ bug, and will probably be fixed in a future release. The current release
+ issues a warning if you do this; to disable the warning, you should provide
+ the full set of parameters. See below for details.
+
+ :param mode: The image mode. See: :ref:`concept-modes`.
+ :param size: The image size.
+ :param data: A bytes or other buffer object containing raw
+ data for the given mode.
+ :param decoder_name: What decoder to use.
+ :param args: Additional parameters for the given decoder. For the
+ default encoder ("raw"), it's recommended that you provide the
+ full set of parameters::
+
+ frombuffer(mode, size, data, "raw", mode, 0, 1)
+
+ :returns: An :py:class:`~PIL.Image.Image` object.
+
+ .. versionadded:: 1.1.4
+ """
+
+ _check_size(size)
+
+ # may pass tuple instead of argument list
+ if len(args) == 1 and isinstance(args[0], tuple):
+ args = args[0]
+
+ if decoder_name == "raw":
+ if args == ():
+ args = mode, 0, 1
+ if args[0] in _MAPMODES:
+ im = new(mode, (0, 0))
+ im = im._new(core.map_buffer(data, size, decoder_name, 0, args))
+ if mode == "P":
+ from . import ImagePalette
+
+ im.palette = ImagePalette.ImagePalette("RGB", im.im.getpalette("RGB"))
+ im.readonly = 1
+ return im
+
+ return frombytes(mode, size, data, decoder_name, args)
+
+
+def fromarray(obj, mode=None):
+ """
+ Creates an image memory from an object exporting the array interface
+ (using the buffer protocol)::
+
+ from PIL import Image
+ import numpy as np
+ a = np.zeros((5, 5))
+ im = Image.fromarray(a)
+
+ If ``obj`` is not contiguous, then the ``tobytes`` method is called
+ and :py:func:`~PIL.Image.frombuffer` is used.
+
+ In the case of NumPy, be aware that Pillow modes do not always correspond
+ to NumPy dtypes. Pillow modes only offer 1-bit pixels, 8-bit pixels,
+ 32-bit signed integer pixels, and 32-bit floating point pixels.
+
+ Pillow images can also be converted to arrays::
+
+ from PIL import Image
+ import numpy as np
+ im = Image.open("hopper.jpg")
+ a = np.asarray(im)
+
+ When converting Pillow images to arrays however, only pixel values are
+ transferred. This means that P and PA mode images will lose their palette.
+
+ :param obj: Object with array interface
+ :param mode: Optional mode to use when reading ``obj``. Will be determined from
+ type if ``None``.
+
+ This will not be used to convert the data after reading, but will be used to
+ change how the data is read::
+
+ from PIL import Image
+ import numpy as np
+ a = np.full((1, 1), 300)
+ im = Image.fromarray(a, mode="L")
+ im.getpixel((0, 0)) # 44
+ im = Image.fromarray(a, mode="RGB")
+ im.getpixel((0, 0)) # (44, 1, 0)
+
+ See: :ref:`concept-modes` for general information about modes.
+ :returns: An image object.
+
+ .. versionadded:: 1.1.6
+ """
+ arr = obj.__array_interface__
+ shape = arr["shape"]
+ ndim = len(shape)
+ strides = arr.get("strides", None)
+ if mode is None:
+ try:
+ typekey = (1, 1) + shape[2:], arr["typestr"]
+ except KeyError as e:
+ msg = "Cannot handle this data type"
+ raise TypeError(msg) from e
+ try:
+ mode, rawmode = _fromarray_typemap[typekey]
+ except KeyError as e:
+ typekey_shape, typestr = typekey
+ msg = f"Cannot handle this data type: {typekey_shape}, {typestr}"
+ raise TypeError(msg) from e
+ else:
+ rawmode = mode
+ if mode in ["1", "L", "I", "P", "F"]:
+ ndmax = 2
+ elif mode == "RGB":
+ ndmax = 3
+ else:
+ ndmax = 4
+ if ndim > ndmax:
+ msg = f"Too many dimensions: {ndim} > {ndmax}."
+ raise ValueError(msg)
+
+ size = 1 if ndim == 1 else shape[1], shape[0]
+ if strides is not None:
+ if hasattr(obj, "tobytes"):
+ obj = obj.tobytes()
+ else:
+ obj = obj.tostring()
+
+ return frombuffer(mode, size, obj, "raw", rawmode, 0, 1)
+
+
+def fromqimage(im):
+ """Creates an image instance from a QImage image"""
+ from . import ImageQt
+
+ if not ImageQt.qt_is_installed:
+ msg = "Qt bindings are not installed"
+ raise ImportError(msg)
+ return ImageQt.fromqimage(im)
+
+
+def fromqpixmap(im):
+ """Creates an image instance from a QPixmap image"""
+ from . import ImageQt
+
+ if not ImageQt.qt_is_installed:
+ msg = "Qt bindings are not installed"
+ raise ImportError(msg)
+ return ImageQt.fromqpixmap(im)
+
+
+_fromarray_typemap = {
+ # (shape, typestr) => mode, rawmode
+ # first two members of shape are set to one
+ ((1, 1), "|b1"): ("1", "1;8"),
+ ((1, 1), "|u1"): ("L", "L"),
+ ((1, 1), "|i1"): ("I", "I;8"),
+ ((1, 1), "u2"): ("I", "I;16B"),
+ ((1, 1), "i2"): ("I", "I;16BS"),
+ ((1, 1), "u4"): ("I", "I;32B"),
+ ((1, 1), "i4"): ("I", "I;32BS"),
+ ((1, 1), "f4"): ("F", "F;32BF"),
+ ((1, 1), "f8"): ("F", "F;64BF"),
+ ((1, 1, 2), "|u1"): ("LA", "LA"),
+ ((1, 1, 3), "|u1"): ("RGB", "RGB"),
+ ((1, 1, 4), "|u1"): ("RGBA", "RGBA"),
+ # shortcuts:
+ ((1, 1), _ENDIAN + "i4"): ("I", "I"),
+ ((1, 1), _ENDIAN + "f4"): ("F", "F"),
+}
+
+
+def _decompression_bomb_check(size):
+ if MAX_IMAGE_PIXELS is None:
+ return
+
+ pixels = max(1, size[0]) * max(1, size[1])
+
+ if pixels > 2 * MAX_IMAGE_PIXELS:
+ msg = (
+ f"Image size ({pixels} pixels) exceeds limit of {2 * MAX_IMAGE_PIXELS} "
+ "pixels, could be decompression bomb DOS attack."
+ )
+ raise DecompressionBombError(msg)
+
+ if pixels > MAX_IMAGE_PIXELS:
+ warnings.warn(
+ f"Image size ({pixels} pixels) exceeds limit of {MAX_IMAGE_PIXELS} pixels, "
+ "could be decompression bomb DOS attack.",
+ DecompressionBombWarning,
+ )
+
+
+def open(fp, mode="r", formats=None) -> Image:
+ """
+ Opens and identifies the given image file.
+
+ This is a lazy operation; this function identifies the file, but
+ the file remains open and the actual image data is not read from
+ the file until you try to process the data (or call the
+ :py:meth:`~PIL.Image.Image.load` method). See
+ :py:func:`~PIL.Image.new`. See :ref:`file-handling`.
+
+ :param fp: A filename (string), pathlib.Path object or a file object.
+ The file object must implement ``file.read``,
+ ``file.seek``, and ``file.tell`` methods,
+ and be opened in binary mode. The file object will also seek to zero
+ before reading.
+ :param mode: The mode. If given, this argument must be "r".
+ :param formats: A list or tuple of formats to attempt to load the file in.
+ This can be used to restrict the set of formats checked.
+ Pass ``None`` to try all supported formats. You can print the set of
+ available formats by running ``python3 -m PIL`` or using
+ the :py:func:`PIL.features.pilinfo` function.
+ :returns: An :py:class:`~PIL.Image.Image` object.
+ :exception FileNotFoundError: If the file cannot be found.
+ :exception PIL.UnidentifiedImageError: If the image cannot be opened and
+ identified.
+ :exception ValueError: If the ``mode`` is not "r", or if a ``StringIO``
+ instance is used for ``fp``.
+ :exception TypeError: If ``formats`` is not ``None``, a list or a tuple.
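+
+ For example, a minimal sketch that only attempts the JPEG and PNG
+ loaders (``"hopper.png"`` is a placeholder filename)::
+
+ from PIL import Image
+
+ with Image.open("hopper.png", formats=["JPEG", "PNG"]) as im:
+ im.load() # force the lazy read to happen now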
+ """
+
+ if mode != "r":
+ msg = f"bad mode {repr(mode)}"
+ raise ValueError(msg)
+ elif isinstance(fp, io.StringIO):
+ msg = (
+ "StringIO cannot be used to open an image. "
+ "Binary data must be used instead."
+ )
+ raise ValueError(msg)
+
+ if formats is None:
+ formats = ID
+ elif not isinstance(formats, (list, tuple)):
+ msg = "formats must be a list or tuple"
+ raise TypeError(msg)
+
+ exclusive_fp = False
+ filename = ""
+ if isinstance(fp, Path):
+ filename = str(fp.resolve())
+ elif is_path(fp):
+ filename = fp
+
+ if filename:
+ fp = builtins.open(filename, "rb")
+ exclusive_fp = True
+
+ try:
+ fp.seek(0)
+ except (AttributeError, io.UnsupportedOperation):
+ fp = io.BytesIO(fp.read())
+ exclusive_fp = True
+
+ prefix = fp.read(16)
+
+ preinit()
+
+ accept_warnings = []
+
+ def _open_core(fp, filename, prefix, formats):
+ for i in formats:
+ i = i.upper()
+ if i not in OPEN:
+ init()
+ try:
+ factory, accept = OPEN[i]
+ result = not accept or accept(prefix)
+ if type(result) in [str, bytes]:
+ accept_warnings.append(result)
+ elif result:
+ fp.seek(0)
+ im = factory(fp, filename)
+ _decompression_bomb_check(im.size)
+ return im
+ except (SyntaxError, IndexError, TypeError, struct.error):
+ # Leave disabled by default, spams the logs with image
+ # opening failures that are entirely expected.
+ # logger.debug("", exc_info=True)
+ continue
+ except BaseException:
+ if exclusive_fp:
+ fp.close()
+ raise
+ return None
+
+ im = _open_core(fp, filename, prefix, formats)
+
+ if im is None and formats is ID:
+ checked_formats = formats.copy()
+ if init():
+ im = _open_core(
+ fp,
+ filename,
+ prefix,
+ tuple(format for format in formats if format not in checked_formats),
+ )
+
+ if im:
+ im._exclusive_fp = exclusive_fp
+ return im
+
+ if exclusive_fp:
+ fp.close()
+ for message in accept_warnings:
+ warnings.warn(message)
+ msg = "cannot identify image file %r" % (filename if filename else fp)
+ raise UnidentifiedImageError(msg)
+
+
+#
+# Image processing.
+
+
+def alpha_composite(im1, im2):
+ """
+ Alpha composite im2 over im1.
+
+ :param im1: The first image. Must have mode RGBA.
+ :param im2: The second image. Must have mode RGBA, and the same size as
+ the first image.
+ :returns: An :py:class:`~PIL.Image.Image` object.
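+
+ For example, a minimal sketch compositing a translucent red square
+ over an opaque blue one::
+
+ from PIL import Image
+
+ im1 = Image.new("RGBA", (32, 32), (0, 0, 255, 255))
+ im2 = Image.new("RGBA", (32, 32), (255, 0, 0, 128))
+ out = Image.alpha_composite(im1, im2)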
+ """
+
+ im1.load()
+ im2.load()
+ return im1._new(core.alpha_composite(im1.im, im2.im))
+
+
+def blend(im1, im2, alpha):
+ """
+ Creates a new image by interpolating between two input images, using
+ a constant alpha::
+
+ out = image1 * (1.0 - alpha) + image2 * alpha
+
+ :param im1: The first image.
+ :param im2: The second image. Must have the same mode and size as
+ the first image.
+ :param alpha: The interpolation alpha factor. If alpha is 0.0, a
+ copy of the first image is returned. If alpha is 1.0, a copy of
+ the second image is returned. There are no restrictions on the
+ alpha value. If necessary, the result is clipped to fit into
+ the allowed output range.
+ :returns: An :py:class:`~PIL.Image.Image` object.
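+
+ For example, a minimal sketch producing an even mix of two images
+ of the same mode and size::
+
+ from PIL import Image
+
+ im1 = Image.new("RGB", (32, 32), "red")
+ im2 = Image.new("RGB", (32, 32), "blue")
+ out = Image.blend(im1, im2, 0.5)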
+ """
+
+ im1.load()
+ im2.load()
+ return im1._new(core.blend(im1.im, im2.im, alpha))
+
+
+def composite(image1, image2, mask):
+ """
+ Create composite image by blending images using a transparency mask.
+
+ :param image1: The first image.
+ :param image2: The second image. Must have the same mode and
+ size as the first image.
+ :param mask: A mask image. This image can have mode
+ "1", "L", or "RGBA", and must have the same size as the
+ other two images.
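+
+ For example, a minimal sketch using a gradient mask so the output
+ fades from one image into the other::
+
+ from PIL import Image
+
+ im1 = Image.new("RGB", (256, 256), "red")
+ im2 = Image.new("RGB", (256, 256), "blue")
+ mask = Image.linear_gradient("L").rotate(90) # left-to-right ramp
+ out = Image.composite(im1, im2, mask)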
+ """
+
+ image = image2.copy()
+ image.paste(image1, None, mask)
+ return image
+
+
+def eval(image, *args):
+ """
+ Applies the function (which should take one argument) to each pixel
+ in the given image. If the image has more than one band, the same
+ function is applied to each band. Note that the function is
+ evaluated once for each possible pixel value, so you cannot use
+ random components or other generators.
+
+ :param image: The input image.
+ :param function: A function object, taking one integer argument.
+ :returns: An :py:class:`~PIL.Image.Image` object.
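+
+ For example, a minimal sketch inverting every band of an image
+ (``"hopper.png"`` is a placeholder filename)::
+
+ from PIL import Image
+
+ with Image.open("hopper.png") as im:
+ out = Image.eval(im, lambda x: 255 - x)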
+ """
+
+ return image.point(args[0])
+
+
+def merge(mode, bands):
+ """
+ Merge a set of single band images into a new multiband image.
+
+ :param mode: The mode to use for the output image. See:
+ :ref:`concept-modes`.
+ :param bands: A sequence containing one single-band image for
+ each band in the output image. All bands must have the
+ same size.
+ :returns: An :py:class:`~PIL.Image.Image` object.
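+
+ For example, a minimal sketch building a solid red "RGB" image out
+ of three single-band images::
+
+ from PIL import Image
+
+ r = Image.new("L", (32, 32), 255)
+ g = Image.new("L", (32, 32), 0)
+ b = Image.new("L", (32, 32), 0)
+ im = Image.merge("RGB", (r, g, b))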
+ """
+
+ if getmodebands(mode) != len(bands) or "*" in mode:
+ msg = "wrong number of bands"
+ raise ValueError(msg)
+ for band in bands[1:]:
+ if band.mode != getmodetype(mode):
+ msg = "mode mismatch"
+ raise ValueError(msg)
+ if band.size != bands[0].size:
+ msg = "size mismatch"
+ raise ValueError(msg)
+ for band in bands:
+ band.load()
+ return bands[0]._new(core.merge(mode, *[b.im for b in bands]))
+
+
+# --------------------------------------------------------------------
+# Plugin registry
+
+
+def register_open(id, factory, accept=None) -> None:
+ """
+ Register an image file plugin. This function should not be used
+ in application code.
+
+ :param id: An image format identifier.
+ :param factory: An image file factory method.
+ :param accept: An optional function that can be used to quickly
+ reject images having another format.
+ """
+ id = id.upper()
+ if id not in ID:
+ ID.append(id)
+ OPEN[id] = factory, accept
+
+
+def register_mime(id, mimetype):
+ """
+ Registers an image MIME type by populating ``Image.MIME``. This function
+ should not be used in application code.
+
+ ``Image.MIME`` provides a mapping from image format identifiers to mime
+ formats, but :py:meth:`~PIL.ImageFile.ImageFile.get_format_mimetype` can
+ provide a different result for specific images.
+
+ :param id: An image format identifier.
+ :param mimetype: The image MIME type for this format.
+ """
+ MIME[id.upper()] = mimetype
+
+
+def register_save(id, driver):
+ """
+ Registers an image save function. This function should not be
+ used in application code.
+
+ :param id: An image format identifier.
+ :param driver: A function to save images in this format.
+ """
+ SAVE[id.upper()] = driver
+
+
+def register_save_all(id, driver):
+ """
+ Registers an image function to save all the frames
+ of a multiframe format. This function should not be
+ used in application code.
+
+ :param id: An image format identifier.
+ :param driver: A function to save images in this format.
+ """
+ SAVE_ALL[id.upper()] = driver
+
+
+def register_extension(id, extension) -> None:
+ """
+ Registers an image extension. This function should not be
+ used in application code.
+
+ :param id: An image format identifier.
+ :param extension: An extension used for this format.
+ """
+ EXTENSION[extension.lower()] = id.upper()
+
+
+def register_extensions(id, extensions):
+ """
+ Registers image extensions. This function should not be
+ used in application code.
+
+ :param id: An image format identifier.
+ :param extensions: A list of extensions used for this format.
+ """
+ for extension in extensions:
+ register_extension(id, extension)
+
+
+def registered_extensions():
+ """
+ Returns a dictionary containing all file extensions belonging
+ to registered plugins
+ """
+ init()
+ return EXTENSION
+
+
+def register_decoder(name, decoder):
+ """
+ Registers an image decoder. This function should not be
+ used in application code.
+
+ :param name: The name of the decoder
+ :param decoder: A callable(mode, args) that returns an
+ ImageFile.PyDecoder object
+
+ .. versionadded:: 4.1.0
+ """
+ DECODERS[name] = decoder
+
+
+def register_encoder(name, encoder):
+ """
+ Registers an image encoder. This function should not be
+ used in application code.
+
+ :param name: The name of the encoder
+ :param encoder: A callable(mode, args) that returns an
+ ImageFile.PyEncoder object
+
+ .. versionadded:: 4.1.0
+ """
+ ENCODERS[name] = encoder
+
+
+# --------------------------------------------------------------------
+# Simple display support.
+
+
+def _show(image, **options):
+ from . import ImageShow
+
+ ImageShow.show(image, **options)
+
+
+# --------------------------------------------------------------------
+# Effects
+
+
+def effect_mandelbrot(size, extent, quality):
+ """
+ Generate a Mandelbrot set covering the given extent.
+
+ :param size: The requested size in pixels, as a 2-tuple:
+ (width, height).
+ :param extent: The extent to cover, as a 4-tuple:
+ (x0, y0, x1, y1).
+ :param quality: Quality.
+ """
+ return Image()._new(core.effect_mandelbrot(size, extent, quality))
+
+
+def effect_noise(size, sigma):
+ """
+ Generate Gaussian noise centered around 128.
+
+ :param size: The requested size in pixels, as a 2-tuple:
+ (width, height).
+ :param sigma: Standard deviation of noise.
+ """
+ return Image()._new(core.effect_noise(size, sigma))
+
+
+def linear_gradient(mode):
+ """
+ Generate 256x256 linear gradient from black to white, top to bottom.
+
+ :param mode: Input mode.
+ """
+ return Image()._new(core.linear_gradient(mode))
+
+
+def radial_gradient(mode):
+ """
+ Generate 256x256 radial gradient from black to white, centre to edge.
+
+ :param mode: Input mode.
+ """
+ return Image()._new(core.radial_gradient(mode))
+
+
+# --------------------------------------------------------------------
+# Resources
+
+
+def _apply_env_variables(env=None):
+ if env is None:
+ env = os.environ
+
+ for var_name, setter in [
+ ("PILLOW_ALIGNMENT", core.set_alignment),
+ ("PILLOW_BLOCK_SIZE", core.set_block_size),
+ ("PILLOW_BLOCKS_MAX", core.set_blocks_max),
+ ]:
+ if var_name not in env:
+ continue
+
+ var = env[var_name].lower()
+
+ units = 1
+ for postfix, mul in [("k", 1024), ("m", 1024 * 1024)]:
+ if var.endswith(postfix):
+ units = mul
+ var = var[: -len(postfix)]
+
+ try:
+ var = int(var) * units
+ except ValueError:
+ warnings.warn(f"{var_name} is not int")
+ continue
+
+ try:
+ setter(var)
+ except ValueError as e:
+ warnings.warn(f"{var_name}: {e}")
+
+
+_apply_env_variables()
+atexit.register(core.clear_cache)
+
+
+class Exif(MutableMapping):
+ """
+ This class provides read and write access to EXIF image data::
+
+ from PIL import Image
+ im = Image.open("exif.png")
+ exif = im.getexif() # Returns an instance of this class
+
+ Information can be read and written, iterated over or deleted::
+
+ print(exif[274]) # 1
+ exif[274] = 2
+ for k, v in exif.items():
+ print("Tag", k, "Value", v) # Tag 274 Value 2
+ del exif[274]
+
+ To access information beyond IFD0, :py:meth:`~PIL.Image.Exif.get_ifd`
+ returns a dictionary::
+
+ from PIL import ExifTags
+ im = Image.open("exif_gps.jpg")
+ exif = im.getexif()
+ gps_ifd = exif.get_ifd(ExifTags.IFD.GPSInfo)
+ print(gps_ifd)
+
+ Other IFDs include ``ExifTags.IFD.Exif``, ``ExifTags.IFD.Makernote``,
+ ``ExifTags.IFD.Interop`` and ``ExifTags.IFD.IFD1``.
+
+ :py:mod:`~PIL.ExifTags` also has enum classes to provide names for data::
+
+ print(exif[ExifTags.Base.Software]) # PIL
+ print(gps_ifd[ExifTags.GPS.GPSDateStamp]) # 1999:99:99 99:99:99
+ """
+
+ endian = None
+ bigtiff = False
+
+ def __init__(self):
+ self._data = {}
+ self._hidden_data = {}
+ self._ifds = {}
+ self._info = None
+ self._loaded_exif = None
+
+ def _fixup(self, value):
+ try:
+ if len(value) == 1 and isinstance(value, tuple):
+ return value[0]
+ except Exception:
+ pass
+ return value
+
+ def _fixup_dict(self, src_dict):
+ # Helper function
+ # returns a dict with any single item tuples/lists as individual values
+ return {k: self._fixup(v) for k, v in src_dict.items()}
+
+ def _get_ifd_dict(self, offset):
+ try:
+ # an offset pointer to the location of the nested embedded IFD.
+ # It should be a long, but may be corrupted.
+ self.fp.seek(offset)
+ except (KeyError, TypeError):
+ pass
+ else:
+ from . import TiffImagePlugin
+
+ info = TiffImagePlugin.ImageFileDirectory_v2(self.head)
+ info.load(self.fp)
+ return self._fixup_dict(info)
+
+ def _get_head(self):
+ version = b"\x2B" if self.bigtiff else b"\x2A"
+ if self.endian == "<":
+ head = b"II" + version + b"\x00" + o32le(8)
+ else:
+ head = b"MM\x00" + version + o32be(8)
+ if self.bigtiff:
+ head += o32le(8) if self.endian == "<" else o32be(8)
+ head += b"\x00\x00\x00\x00"
+ return head
+
+ def load(self, data):
+ # Extract EXIF information. This is highly experimental,
+ # and is likely to be replaced with something better in a future
+ # version.
+
+ # The EXIF record consists of a TIFF file embedded in a JPEG
+ # application marker (!).
+ if data == self._loaded_exif:
+ return
+ self._loaded_exif = data
+ self._data.clear()
+ self._hidden_data.clear()
+ self._ifds.clear()
+ if data and data.startswith(b"Exif\x00\x00"):
+ data = data[6:]
+ if not data:
+ self._info = None
+ return
+
+ self.fp = io.BytesIO(data)
+ self.head = self.fp.read(8)
+ # process dictionary
+ from . import TiffImagePlugin
+
+ self._info = TiffImagePlugin.ImageFileDirectory_v2(self.head)
+ self.endian = self._info._endian
+ self.fp.seek(self._info.next)
+ self._info.load(self.fp)
+
+ def load_from_fp(self, fp, offset=None):
+ self._loaded_exif = None
+ self._data.clear()
+ self._hidden_data.clear()
+ self._ifds.clear()
+
+ # process dictionary
+ from . import TiffImagePlugin
+
+ self.fp = fp
+ if offset is not None:
+ self.head = self._get_head()
+ else:
+ self.head = self.fp.read(8)
+ self._info = TiffImagePlugin.ImageFileDirectory_v2(self.head)
+ if self.endian is None:
+ self.endian = self._info._endian
+ if offset is None:
+ offset = self._info.next
+ self.fp.tell()
+ self.fp.seek(offset)
+ self._info.load(self.fp)
+
+ def _get_merged_dict(self):
+ merged_dict = dict(self)
+
+ # get EXIF extension
+ if ExifTags.IFD.Exif in self:
+ ifd = self._get_ifd_dict(self[ExifTags.IFD.Exif])
+ if ifd:
+ merged_dict.update(ifd)
+
+ # GPS
+ if ExifTags.IFD.GPSInfo in self:
+ merged_dict[ExifTags.IFD.GPSInfo] = self._get_ifd_dict(
+ self[ExifTags.IFD.GPSInfo]
+ )
+
+ return merged_dict
+
+ def tobytes(self, offset=8):
+ from . import TiffImagePlugin
+
+ head = self._get_head()
+ ifd = TiffImagePlugin.ImageFileDirectory_v2(ifh=head)
+ for tag, value in self.items():
+ if tag in [
+ ExifTags.IFD.Exif,
+ ExifTags.IFD.GPSInfo,
+ ] and not isinstance(value, dict):
+ value = self.get_ifd(tag)
+ if (
+ tag == ExifTags.IFD.Exif
+ and ExifTags.IFD.Interop in value
+ and not isinstance(value[ExifTags.IFD.Interop], dict)
+ ):
+ value = value.copy()
+ value[ExifTags.IFD.Interop] = self.get_ifd(ExifTags.IFD.Interop)
+ ifd[tag] = value
+ return b"Exif\x00\x00" + head + ifd.tobytes(offset)
+
+ def get_ifd(self, tag):
+ if tag not in self._ifds:
+ if tag == ExifTags.IFD.IFD1:
+ if self._info is not None and self._info.next != 0:
+ self._ifds[tag] = self._get_ifd_dict(self._info.next)
+ elif tag in [ExifTags.IFD.Exif, ExifTags.IFD.GPSInfo]:
+ offset = self._hidden_data.get(tag, self.get(tag))
+ if offset is not None:
+ self._ifds[tag] = self._get_ifd_dict(offset)
+ elif tag in [ExifTags.IFD.Interop, ExifTags.IFD.Makernote]:
+ if ExifTags.IFD.Exif not in self._ifds:
+ self.get_ifd(ExifTags.IFD.Exif)
+ tag_data = self._ifds[ExifTags.IFD.Exif][tag]
+ if tag == ExifTags.IFD.Makernote:
+ from .TiffImagePlugin import ImageFileDirectory_v2
+
+ if tag_data[:8] == b"FUJIFILM":
+ ifd_offset = i32le(tag_data, 8)
+ ifd_data = tag_data[ifd_offset:]
+
+ makernote = {}
+ for i in range(0, struct.unpack(" 4:
+ (offset,) = struct.unpack("H", tag_data[:2])[0]):
+ ifd_tag, typ, count, data = struct.unpack(
+ ">HHL4s", tag_data[i * 12 + 2 : (i + 1) * 12 + 2]
+ )
+ if ifd_tag == 0x1101:
+ # CameraInfo
+ (offset,) = struct.unpack(">L", data)
+ self.fp.seek(offset)
+
+ camerainfo = {"ModelID": self.fp.read(4)}
+
+ self.fp.read(4)
+ # Seconds since 2000
+ camerainfo["TimeStamp"] = i32le(self.fp.read(12))
+
+ self.fp.read(4)
+ camerainfo["InternalSerialNumber"] = self.fp.read(4)
+
+ self.fp.read(12)
+ parallax = self.fp.read(4)
+ handler = ImageFileDirectory_v2._load_dispatch[
+ TiffTags.FLOAT
+ ][1]
+ camerainfo["Parallax"] = handler(
+ ImageFileDirectory_v2(), parallax, False
+ )
+
+ self.fp.read(4)
+ camerainfo["Category"] = self.fp.read(2)
+
+ makernote = {0x1101: dict(self._fixup_dict(camerainfo))}
+ self._ifds[tag] = makernote
+ else:
+ # Interop
+ self._ifds[tag] = self._get_ifd_dict(tag_data)
+ ifd = self._ifds.get(tag, {})
+ if tag == ExifTags.IFD.Exif and self._hidden_data:
+ ifd = {
+ k: v
+ for (k, v) in ifd.items()
+ if k not in (ExifTags.IFD.Interop, ExifTags.IFD.Makernote)
+ }
+ return ifd
+
+ def hide_offsets(self):
+ for tag in (ExifTags.IFD.Exif, ExifTags.IFD.GPSInfo):
+ if tag in self:
+ self._hidden_data[tag] = self[tag]
+ del self[tag]
+
+ def __str__(self):
+ if self._info is not None:
+ # Load all keys into self._data
+ for tag in self._info:
+ self[tag]
+
+ return str(self._data)
+
+ def __len__(self):
+ keys = set(self._data)
+ if self._info is not None:
+ keys.update(self._info)
+ return len(keys)
+
+ def __getitem__(self, tag):
+ if self._info is not None and tag not in self._data and tag in self._info:
+ self._data[tag] = self._fixup(self._info[tag])
+ del self._info[tag]
+ return self._data[tag]
+
+ def __contains__(self, tag):
+ return tag in self._data or (self._info is not None and tag in self._info)
+
+ def __setitem__(self, tag, value):
+ if self._info is not None and tag in self._info:
+ del self._info[tag]
+ self._data[tag] = value
+
+ def __delitem__(self, tag):
+ if self._info is not None and tag in self._info:
+ del self._info[tag]
+ else:
+ del self._data[tag]
+
+ def __iter__(self):
+ keys = set(self._data)
+ if self._info is not None:
+ keys.update(self._info)
+ return iter(keys)
diff --git a/Lib/site-packages/PIL/ImageChops.py b/Lib/site-packages/PIL/ImageChops.py
new file mode 100644
index 0000000..29a5c99
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageChops.py
@@ -0,0 +1,311 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# standard channel operations
+#
+# History:
+# 1996-03-24 fl Created
+# 1996-08-13 fl Added logical operations (for "1" images)
+# 2000-10-12 fl Added offset method (from Image.py)
+#
+# Copyright (c) 1997-2000 by Secret Labs AB
+# Copyright (c) 1996-2000 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+from __future__ import annotations
+
+from . import Image
+
+
+def constant(image: Image.Image, value: int) -> Image.Image:
+ """Fill a channel with a given gray level.
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ return Image.new("L", image.size, value)
+
+
+def duplicate(image: Image.Image) -> Image.Image:
+ """Copy a channel. Alias for :py:meth:`PIL.Image.Image.copy`.
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ return image.copy()
+
+
+def invert(image: Image.Image) -> Image.Image:
+ """
+ Invert an image (channel). ::
+
+ out = MAX - image
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image.load()
+ return image._new(image.im.chop_invert())
+
+
+def lighter(image1: Image.Image, image2: Image.Image) -> Image.Image:
+ """
+ Compares the two images, pixel by pixel, and returns a new image containing
+ the lighter values. ::
+
+ out = max(image1, image2)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_lighter(image2.im))
+
+
+def darker(image1: Image.Image, image2: Image.Image) -> Image.Image:
+ """
+ Compares the two images, pixel by pixel, and returns a new image containing
+ the darker values. ::
+
+ out = min(image1, image2)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_darker(image2.im))
+
+
+def difference(image1: Image.Image, image2: Image.Image) -> Image.Image:
+ """
+ Returns the absolute value of the pixel-by-pixel difference between the two
+ images. ::
+
+ out = abs(image1 - image2)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_difference(image2.im))
+
+
+def multiply(image1: Image.Image, image2: Image.Image) -> Image.Image:
+ """
+ Superimposes two images on top of each other.
+
+ If you multiply an image with a solid black image, the result is black. If
+ you multiply with a solid white image, the image is unaffected. ::
+
+ out = image1 * image2 / MAX
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_multiply(image2.im))
+
+
+def screen(image1: Image.Image, image2: Image.Image) -> Image.Image:
+ """
+ Superimposes two inverted images on top of each other. ::
+
+ out = MAX - ((MAX - image1) * (MAX - image2) / MAX)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_screen(image2.im))
+
+
+def soft_light(image1: Image.Image, image2: Image.Image) -> Image.Image:
+ """
+ Superimposes two images on top of each other using the Soft Light algorithm
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_soft_light(image2.im))
+
+
+def hard_light(image1: Image.Image, image2: Image.Image) -> Image.Image:
+ """
+ Superimposes two images on top of each other using the Hard Light algorithm
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_hard_light(image2.im))
+
+
+def overlay(image1: Image.Image, image2: Image.Image) -> Image.Image:
+ """
+ Superimposes two images on top of each other using the Overlay algorithm
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_overlay(image2.im))
+
+
+def add(
+ image1: Image.Image, image2: Image.Image, scale: float = 1.0, offset: float = 0
+) -> Image.Image:
+ """
+ Adds two images, dividing the result by scale and adding the
+ offset. If omitted, scale defaults to 1.0, and offset to 0.0. ::
+
+ out = ((image1 + image2) / scale + offset)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_add(image2.im, scale, offset))
+
+
+def subtract(
+ image1: Image.Image, image2: Image.Image, scale: float = 1.0, offset: float = 0
+) -> Image.Image:
+ """
+ Subtracts two images, dividing the result by scale and adding the offset.
+ If omitted, scale defaults to 1.0, and offset to 0.0. ::
+
+ out = ((image1 - image2) / scale + offset)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_subtract(image2.im, scale, offset))
+
+
+def add_modulo(image1: Image.Image, image2: Image.Image) -> Image.Image:
+ """Add two images, without clipping the result. ::
+
+ out = ((image1 + image2) % MAX)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_add_modulo(image2.im))
+
+
+def subtract_modulo(image1: Image.Image, image2: Image.Image) -> Image.Image:
+ """Subtract two images, without clipping the result. ::
+
+ out = ((image1 - image2) % MAX)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_subtract_modulo(image2.im))
+
+
+def logical_and(image1: Image.Image, image2: Image.Image) -> Image.Image:
+ """Logical AND between two images.
+
+ Both of the images must have mode "1". If you would like to perform a
+ logical AND on an image with a mode other than "1", try
+ :py:meth:`~PIL.ImageChops.multiply` instead, using a black-and-white mask
+ as the second image. ::
+
+ out = ((image1 and image2) % MAX)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_and(image2.im))
+
+
+def logical_or(image1: Image.Image, image2: Image.Image) -> Image.Image:
+ """Logical OR between two images.
+
+ Both of the images must have mode "1". ::
+
+ out = ((image1 or image2) % MAX)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_or(image2.im))
+
+
+def logical_xor(image1: Image.Image, image2: Image.Image) -> Image.Image:
+ """Logical XOR between two images.
+
+ Both of the images must have mode "1". ::
+
+ out = ((bool(image1) != bool(image2)) % MAX)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_xor(image2.im))
+
+
+def blend(image1: Image.Image, image2: Image.Image, alpha: float) -> Image.Image:
+ """Blend images using constant transparency weight. Alias for
+ :py:func:`PIL.Image.blend`.
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ return Image.blend(image1, image2, alpha)
+
+
+def composite(
+ image1: Image.Image, image2: Image.Image, mask: Image.Image
+) -> Image.Image:
+ """Create composite using transparency mask. Alias for
+ :py:func:`PIL.Image.composite`.
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ return Image.composite(image1, image2, mask)
+
+
+def offset(image: Image.Image, xoffset: int, yoffset: int | None = None) -> Image.Image:
+ """Returns a copy of the image where data has been offset by the given
+ distances. Data wraps around the edges. If ``yoffset`` is omitted, it
+ is assumed to be equal to ``xoffset``.
+
+ :param image: Input image.
+ :param xoffset: The horizontal distance.
+ :param yoffset: The vertical distance. If omitted, both
+ distances are set to the same value.
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ if yoffset is None:
+ yoffset = xoffset
+ image.load()
+ return image._new(image.im.offset(xoffset, yoffset))
diff --git a/Lib/site-packages/PIL/ImageCms.py b/Lib/site-packages/PIL/ImageCms.py
new file mode 100644
index 0000000..643fce8
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageCms.py
@@ -0,0 +1,1007 @@
+# The Python Imaging Library.
+# $Id$
+
+# Optional color management support, based on Kevin Cazabon's PyCMS
+# library.
+
+# History:
+
+# 2009-03-08 fl Added to PIL.
+
+# Copyright (C) 2002-2003 Kevin Cazabon
+# Copyright (c) 2009 by Fredrik Lundh
+# Copyright (c) 2013 by Eric Soroos
+
+# See the README file for information on usage and redistribution. See
+# below for the original description.
+from __future__ import annotations
+
+import sys
+from enum import IntEnum
+
+from . import Image
+
+try:
+ from . import _imagingcms
+except ImportError as ex:
+ # Allow error import for doc purposes, but error out when accessing
+ # anything in core.
+ from ._util import DeferredError
+
+ _imagingcms = DeferredError.new(ex)
+
+DESCRIPTION = """
+pyCMS
+
+ a Python / PIL interface to the littleCMS ICC Color Management System
+ Copyright (C) 2002-2003 Kevin Cazabon
+ kevin@cazabon.com
+ https://www.cazabon.com
+
+ pyCMS home page: https://www.cazabon.com/pyCMS
+ littleCMS home page: https://www.littlecms.com
+ (littleCMS is Copyright (C) 1998-2001 Marti Maria)
+
+ Originally released under LGPL. Graciously donated to PIL in
+ March 2009, for distribution under the standard PIL license
+
+ The pyCMS.py module provides a "clean" interface between Python/PIL and
+ pyCMSdll, taking care of some of the more complex handling of the direct
+ pyCMSdll functions, as well as error-checking and making sure that all
+ relevant data is kept together.
+
+ While it is possible to call pyCMSdll functions directly, it's not highly
+ recommended.
+
+ Version History:
+
+ 1.0.0 pil Oct 2013 Port to LCMS 2.
+
+ 0.1.0 pil mod March 10, 2009
+
+ Renamed display profile to proof profile. The proof
+ profile is the profile of the device that is being
+ simulated, not the profile of the device which is
+ actually used to display/print the final simulation
+ (that'd be the output profile) - also see LCMSAPI.txt
+ input colorspace -> using 'renderingIntent' -> proof
+ colorspace -> using 'proofRenderingIntent' -> output
+ colorspace
+
+ Added LCMS FLAGS support.
+ Added FLAGS["SOFTPROOFING"] as default flag for
+ buildProofTransform (otherwise the proof profile/intent
+ would be ignored).
+
+ 0.1.0 pil March 2009 - added to PIL, as PIL.ImageCms
+
+ 0.0.2 alpha Jan 6, 2002
+
+ Added try/except statements around type() checks of
+ potential CObjects... Python won't let you use type()
+ on them, and raises a TypeError (stupid, if you ask
+ me!)
+
+ Added buildProofTransformFromOpenProfiles() function.
+ Additional fixes in DLL, see DLL code for details.
+
+ 0.0.1 alpha first public release, Dec. 26, 2002
+
+ Known to-do list with current version (of Python interface, not pyCMSdll):
+
+ none
+
+"""
+
+VERSION = "1.0.0 pil"
+
+# --------------------------------------------------------------------.
+
+core = _imagingcms
+
+#
+# intent/direction values
+
+
+class Intent(IntEnum):
+ PERCEPTUAL = 0
+ RELATIVE_COLORIMETRIC = 1
+ SATURATION = 2
+ ABSOLUTE_COLORIMETRIC = 3
+
+
+class Direction(IntEnum):
+ INPUT = 0
+ OUTPUT = 1
+ PROOF = 2
+
+
+#
+# flags
+
+FLAGS = {
+ "MATRIXINPUT": 1,
+ "MATRIXOUTPUT": 2,
+ "MATRIXONLY": (1 | 2),
+ "NOWHITEONWHITEFIXUP": 4, # Don't hot fix scum dot
+ # Don't create prelinearization tables on precalculated transforms
+ # (internal use):
+ "NOPRELINEARIZATION": 16,
+ "GUESSDEVICECLASS": 32, # Guess device class (for transform2devicelink)
+ "NOTCACHE": 64, # Inhibit 1-pixel cache
+ "NOTPRECALC": 256,
+ "NULLTRANSFORM": 512, # Don't transform anyway
+ "HIGHRESPRECALC": 1024, # Use more memory to give better accuracy
+ "LOWRESPRECALC": 2048, # Use less memory to minimize resources
+ "WHITEBLACKCOMPENSATION": 8192,
+ "BLACKPOINTCOMPENSATION": 8192,
+ "GAMUTCHECK": 4096, # Out of Gamut alarm
+ "SOFTPROOFING": 16384, # Do softproofing
+ "PRESERVEBLACK": 32768, # Black preservation
+ "NODEFAULTRESOURCEDEF": 16777216, # CRD special
+ "GRIDPOINTS": lambda n: (n & 0xFF) << 16, # Gridpoints
+}
+
+_MAX_FLAG = 0
+for flag in FLAGS.values():
+ if isinstance(flag, int):
+ _MAX_FLAG = _MAX_FLAG | flag
+
+
+# --------------------------------------------------------------------.
+# Experimental PIL-level API
+# --------------------------------------------------------------------.
+
+##
+# Profile.
+
+
+class ImageCmsProfile:
+ def __init__(self, profile):
+ """
+ :param profile: Either a string representing a filename,
+ a file like object containing a profile or a
+ low-level profile object
+
+ """
+
+ if isinstance(profile, str):
+ if sys.platform == "win32":
+ profile_bytes_path = profile.encode()
+ try:
+ profile_bytes_path.decode("ascii")
+ except UnicodeDecodeError:
+ with open(profile, "rb") as f:
+ self._set(core.profile_frombytes(f.read()))
+ return
+ self._set(core.profile_open(profile), profile)
+ elif hasattr(profile, "read"):
+ self._set(core.profile_frombytes(profile.read()))
+ elif isinstance(profile, _imagingcms.CmsProfile):
+ self._set(profile)
+ else:
+ msg = "Invalid type for Profile"
+ raise TypeError(msg)
+
+ def _set(self, profile, filename=None):
+ self.profile = profile
+ self.filename = filename
+ self.product_name = None # profile.product_name
+ self.product_info = None # profile.product_info
+
+ def tobytes(self):
+ """
+ Returns the profile in a format suitable for embedding in
+ saved images.
+
+ :returns: a bytes object containing the ICC profile.
+ """
+
+ return core.profile_tobytes(self.profile)
+
+
+class ImageCmsTransform(Image.ImagePointHandler):
+
+ """
+ Transform. This can be used with the procedural API, or with the standard
+ :py:func:`~PIL.Image.Image.point` method.
+
+ Will return the output profile in the ``output.info['icc_profile']``.
+ """
+
+ def __init__(
+ self,
+ input,
+ output,
+ input_mode,
+ output_mode,
+ intent=Intent.PERCEPTUAL,
+ proof=None,
+ proof_intent=Intent.ABSOLUTE_COLORIMETRIC,
+ flags=0,
+ ):
+ if proof is None:
+ self.transform = core.buildTransform(
+ input.profile, output.profile, input_mode, output_mode, intent, flags
+ )
+ else:
+ self.transform = core.buildProofTransform(
+ input.profile,
+ output.profile,
+ proof.profile,
+ input_mode,
+ output_mode,
+ intent,
+ proof_intent,
+ flags,
+ )
+ # Note: inputMode and outputMode are for pyCMS compatibility only
+ self.input_mode = self.inputMode = input_mode
+ self.output_mode = self.outputMode = output_mode
+
+ self.output_profile = output
+
+ def point(self, im):
+ return self.apply(im)
+
+ def apply(self, im, imOut=None):
+ im.load()
+ if imOut is None:
+ imOut = Image.new(self.output_mode, im.size, None)
+ self.transform.apply(im.im.id, imOut.im.id)
+ imOut.info["icc_profile"] = self.output_profile.tobytes()
+ return imOut
+
+ def apply_in_place(self, im):
+ im.load()
+ if im.mode != self.output_mode:
+ msg = "mode mismatch"
+ raise ValueError(msg) # wrong output mode
+ self.transform.apply(im.im.id, im.im.id)
+ im.info["icc_profile"] = self.output_profile.tobytes()
+ return im
+
+
+def get_display_profile(handle=None):
+ """
+ (experimental) Fetches the profile for the current display device.
+
+ :returns: ``None`` if the profile is not known.
+ """
+
+ if sys.platform != "win32":
+ return None
+
+ from . import ImageWin
+
+ if isinstance(handle, ImageWin.HDC):
+ profile = core.get_display_profile_win32(handle, 1)
+ else:
+ profile = core.get_display_profile_win32(handle or 0)
+ if profile is None:
+ return None
+ return ImageCmsProfile(profile)
+
+
+# --------------------------------------------------------------------.
+# pyCMS compatible layer
+# --------------------------------------------------------------------.
+
+
+class PyCMSError(Exception):
+
+ """(pyCMS) Exception class.
+ This is used for all errors in the pyCMS API."""
+
+ pass
+
+
+def profileToProfile(
+ im,
+ inputProfile,
+ outputProfile,
+ renderingIntent=Intent.PERCEPTUAL,
+ outputMode=None,
+ inPlace=False,
+ flags=0,
+):
+ """
+ (pyCMS) Applies an ICC transformation to a given image, mapping from
+ ``inputProfile`` to ``outputProfile``.
+
+ If the input or output profiles specified are not valid filenames, a
+ :exc:`PyCMSError` will be raised. If ``inPlace`` is ``True`` and
+ ``outputMode != im.mode``, a :exc:`PyCMSError` will be raised.
+ If an error occurs during application of the profiles,
+ a :exc:`PyCMSError` will be raised.
+ If ``outputMode`` is not a mode supported by the ``outputProfile`` (or by pyCMS),
+ a :exc:`PyCMSError` will be raised.
+
+ This function applies an ICC transformation to im from ``inputProfile``'s
+ color space to ``outputProfile``'s color space using the specified rendering
+ intent to decide how to handle out-of-gamut colors.
+
+ ``outputMode`` can be used to specify that a color mode conversion is to
+ be done using these profiles, but the specified profiles must be able
+ to handle that mode. I.e., if converting im from RGB to CMYK using
+ profiles, the input profile must handle RGB data, and the output
+ profile must handle CMYK data.
+
+ :param im: An open :py:class:`~PIL.Image.Image` object (i.e. Image.new(...)
+ or Image.open(...), etc.)
+ :param inputProfile: String, as a valid filename path to the ICC input
+ profile you wish to use for this image, or a profile object
+ :param outputProfile: String, as a valid filename path to the ICC output
+ profile you wish to use for this image, or a profile object
+ :param renderingIntent: Integer (0-3) specifying the rendering intent you
+ wish to use for the transform
+
+ ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT)
+ ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+ ImageCms.Intent.SATURATION = 2
+ ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+ see the pyCMS documentation for details on rendering intents and what
+ they do.
+ :param outputMode: A valid PIL mode for the output image (i.e. "RGB",
+ "CMYK", etc.). Note: if rendering the image "inPlace", outputMode
+ MUST be the same mode as the input, or omitted completely. If
+ omitted, the outputMode will be the same as the mode of the input
+ image (im.mode)
+ :param inPlace: Boolean. If ``True``, the original image is modified in-place,
+ and ``None`` is returned. If ``False`` (default), a new
+ :py:class:`~PIL.Image.Image` object is returned with the transform applied.
+ :param flags: Integer (0-...) specifying additional flags
+ :returns: Either None or a new :py:class:`~PIL.Image.Image` object, depending on
+ the value of ``inPlace``
+ :exception PyCMSError:
+ """
+
+ if outputMode is None:
+ outputMode = im.mode
+
+ if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
+ msg = "renderingIntent must be an integer between 0 and 3"
+ raise PyCMSError(msg)
+
+ if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
+ msg = f"flags must be an integer between 0 and {_MAX_FLAG}"
+ raise PyCMSError(msg)
+
+ try:
+ if not isinstance(inputProfile, ImageCmsProfile):
+ inputProfile = ImageCmsProfile(inputProfile)
+ if not isinstance(outputProfile, ImageCmsProfile):
+ outputProfile = ImageCmsProfile(outputProfile)
+ transform = ImageCmsTransform(
+ inputProfile,
+ outputProfile,
+ im.mode,
+ outputMode,
+ renderingIntent,
+ flags=flags,
+ )
+ if inPlace:
+ transform.apply_in_place(im)
+ imOut = None
+ else:
+ imOut = transform.apply(im)
+ except (OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+ return imOut
+
+
+def getOpenProfile(profileFilename):
+ """
+ (pyCMS) Opens an ICC profile file.
+
+ The PyCMSProfile object can be passed back into pyCMS for use in creating
+ transforms and such (as in ImageCms.buildTransformFromOpenProfiles()).
+
+ If ``profileFilename`` is not a valid filename for an ICC profile,
+ a :exc:`PyCMSError` will be raised.
+
+ :param profileFilename: String, as a valid filename path to the ICC profile
+ you wish to open, or a file-like object.
+ :returns: A CmsProfile class object.
+ :exception PyCMSError:
+ """
+
+ try:
+ return ImageCmsProfile(profileFilename)
+ except (OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def buildTransform(
+ inputProfile,
+ outputProfile,
+ inMode,
+ outMode,
+ renderingIntent=Intent.PERCEPTUAL,
+ flags=0,
+):
+ """
+ (pyCMS) Builds an ICC transform mapping from the ``inputProfile`` to the
+ ``outputProfile``. Use applyTransform to apply the transform to a given
+ image.
+
+ If the input or output profiles specified are not valid filenames, a
+ :exc:`PyCMSError` will be raised. If an error occurs during creation
+ of the transform, a :exc:`PyCMSError` will be raised.
+
+ If ``inMode`` or ``outMode`` are not a mode supported by the ``outputProfile``
+ (or by pyCMS), a :exc:`PyCMSError` will be raised.
+
+ This function builds and returns an ICC transform from the ``inputProfile``
+ to the ``outputProfile`` using the ``renderingIntent`` to determine what to do
+ with out-of-gamut colors. It will ONLY work for converting images that
+ are in ``inMode`` to images that are in ``outMode`` color format (PIL mode,
+ i.e. "RGB", "RGBA", "CMYK", etc.).
+
+ Building the transform is a fair part of the overhead in
+ ImageCms.profileToProfile(), so if you're planning on converting multiple
+ images using the same input/output settings, this can save you time.
+ Once you have a transform object, it can be used with
+ ImageCms.applyProfile() to convert images without the need to re-compute
+ the lookup table for the transform.
+
+ The reason pyCMS returns a class object rather than a handle directly
+ to the transform is that it needs to keep track of the PIL input/output
+ modes that the transform is meant for. These attributes are stored in
+ the ``inMode`` and ``outMode`` attributes of the object (which can be
+ manually overridden if you really want to, but I don't know of any
+ time that would be of use, or would even work).
+
+ :param inputProfile: String, as a valid filename path to the ICC input
+ profile you wish to use for this transform, or a profile object
+ :param outputProfile: String, as a valid filename path to the ICC output
+ profile you wish to use for this transform, or a profile object
+ :param inMode: String, as a valid PIL mode that the appropriate profile
+ also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
+ :param outMode: String, as a valid PIL mode that the appropriate profile
+ also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
+ :param renderingIntent: Integer (0-3) specifying the rendering intent you
+ wish to use for the transform
+
+ ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT)
+ ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+ ImageCms.Intent.SATURATION = 2
+ ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+ see the pyCMS documentation for details on rendering intents and what
+ they do.
+ :param flags: Integer (0-...) specifying additional flags
+ :returns: A CmsTransform class object.
+ :exception PyCMSError:
+ """
+
+ if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
+ msg = "renderingIntent must be an integer between 0 and 3"
+ raise PyCMSError(msg)
+
+ if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
+ msg = "flags must be an integer between 0 and %s" + _MAX_FLAG
+ raise PyCMSError(msg)
+
+ try:
+ if not isinstance(inputProfile, ImageCmsProfile):
+ inputProfile = ImageCmsProfile(inputProfile)
+ if not isinstance(outputProfile, ImageCmsProfile):
+ outputProfile = ImageCmsProfile(outputProfile)
+ return ImageCmsTransform(
+ inputProfile, outputProfile, inMode, outMode, renderingIntent, flags=flags
+ )
+ except (OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def buildProofTransform(
+ inputProfile,
+ outputProfile,
+ proofProfile,
+ inMode,
+ outMode,
+ renderingIntent=Intent.PERCEPTUAL,
+ proofRenderingIntent=Intent.ABSOLUTE_COLORIMETRIC,
+ flags=FLAGS["SOFTPROOFING"],
+):
+ """
+ (pyCMS) Builds an ICC transform mapping from the ``inputProfile`` to the
+ ``outputProfile``, but tries to simulate the result that would be
+ obtained on the ``proofProfile`` device.
+
+ If the input, output, or proof profiles specified are not valid
+ filenames, a :exc:`PyCMSError` will be raised.
+
+ If an error occurs during creation of the transform,
+ a :exc:`PyCMSError` will be raised.
+
+ If ``inMode`` or ``outMode`` are not a mode supported by the ``outputProfile``
+ (or by pyCMS), a :exc:`PyCMSError` will be raised.
+
+ This function builds and returns an ICC transform from the ``inputProfile``
+ to the ``outputProfile``, but tries to simulate the result that would be
+ obtained on the ``proofProfile`` device using ``renderingIntent`` and
+ ``proofRenderingIntent`` to determine what to do with out-of-gamut
+ colors. This is known as "soft-proofing". It will ONLY work for
+ converting images that are in ``inMode`` to images that are in outMode
+ color format (PIL mode, i.e. "RGB", "RGBA", "CMYK", etc.).
+
+ Usage of the resulting transform object is exactly the same as with
+ ImageCms.buildTransform().
+
+ Proof profiling is generally used when using an output device to get a
+ good idea of what the final printed/displayed image would look like on
+ the ``proofProfile`` device when it's quicker and easier to use the
+ output device for judging color. Generally, this means that the
+ output device is a monitor, or a dye-sub printer (etc.), and the simulated
+ device is something more expensive, complicated, or time consuming
+ (making it difficult to make a real print for color judgement purposes).
+
+ Soft-proofing basically functions by adjusting the colors on the
+ output device to match the colors of the device being simulated. However,
+ when the simulated device has a much wider gamut than the output
+ device, you may obtain marginal results.
+
+ :param inputProfile: String, as a valid filename path to the ICC input
+ profile you wish to use for this transform, or a profile object
+ :param outputProfile: String, as a valid filename path to the ICC output
+ (monitor, usually) profile you wish to use for this transform, or a
+ profile object
+ :param proofProfile: String, as a valid filename path to the ICC proof
+ profile you wish to use for this transform, or a profile object
+ :param inMode: String, as a valid PIL mode that the appropriate profile
+ also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
+ :param outMode: String, as a valid PIL mode that the appropriate profile
+ also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
+ :param renderingIntent: Integer (0-3) specifying the rendering intent you
+ wish to use for the input->proof (simulated) transform
+
+ ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT)
+ ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+ ImageCms.Intent.SATURATION = 2
+ ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+ see the pyCMS documentation for details on rendering intents and what
+ they do.
+ :param proofRenderingIntent: Integer (0-3) specifying the rendering intent
+ you wish to use for proof->output transform
+
+ ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT)
+ ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+ ImageCms.Intent.SATURATION = 2
+ ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+ see the pyCMS documentation for details on rendering intents and what
+ they do.
+ :param flags: Integer (0-...) specifying additional flags
+ :returns: A CmsTransform class object.
+ :exception PyCMSError:
+ """
+
+ if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
+ msg = "renderingIntent must be an integer between 0 and 3"
+ raise PyCMSError(msg)
+
+ if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
+ msg = "flags must be an integer between 0 and %s" + _MAX_FLAG
+ raise PyCMSError(msg)
+
+ try:
+ if not isinstance(inputProfile, ImageCmsProfile):
+ inputProfile = ImageCmsProfile(inputProfile)
+ if not isinstance(outputProfile, ImageCmsProfile):
+ outputProfile = ImageCmsProfile(outputProfile)
+ if not isinstance(proofProfile, ImageCmsProfile):
+ proofProfile = ImageCmsProfile(proofProfile)
+ return ImageCmsTransform(
+ inputProfile,
+ outputProfile,
+ inMode,
+ outMode,
+ renderingIntent,
+ proofProfile,
+ proofRenderingIntent,
+ flags,
+ )
+ except (OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+buildTransformFromOpenProfiles = buildTransform
+buildProofTransformFromOpenProfiles = buildProofTransform
+
+
+def applyTransform(im, transform, inPlace=False):
+ """
+ (pyCMS) Applies a transform to a given image.
+
+ If ``im.mode != transform.inMode``, a :exc:`PyCMSError` is raised.
+
+ If ``inPlace`` is ``True`` and ``transform.inMode != transform.outMode``, a
+ :exc:`PyCMSError` is raised.
+
+ If ``im.mode``, ``transform.inMode`` or ``transform.outMode`` is not
+ supported by pyCMSdll or the profiles you used for the transform, a
+ :exc:`PyCMSError` is raised.
+
+ If an error occurs while the transform is being applied,
+ a :exc:`PyCMSError` is raised.
+
+ This function applies a pre-calculated transform (from
+ ImageCms.buildTransform() or ImageCms.buildTransformFromOpenProfiles())
+ to an image. The transform can be used for multiple images, saving
+ considerable calculation time if doing the same conversion multiple times.
+
+ If you want to modify im in-place instead of receiving a new image as
+ the return value, set ``inPlace`` to ``True``. This can only be done if
+ ``transform.inMode`` and ``transform.outMode`` are the same, because we can't
+ change the mode in-place (the buffer sizes for some modes are
+ different). The default behavior is to return a new :py:class:`~PIL.Image.Image`
+ object of the same dimensions in mode ``transform.outMode``.
+
+ :param im: An :py:class:`~PIL.Image.Image` object, and im.mode must be the same
+ as the ``inMode`` supported by the transform.
+ :param transform: A valid CmsTransform class object
+ :param inPlace: Bool. If ``True``, ``im`` is modified in place and ``None`` is
+ returned, if ``False``, a new :py:class:`~PIL.Image.Image` object with the
+ transform applied is returned (and ``im`` is not changed). The default is
+ ``False``.
+ :returns: Either ``None``, or a new :py:class:`~PIL.Image.Image` object,
+ depending on the value of ``inPlace``. The profile will be returned in
+ the image's ``info['icc_profile']``.
+ :exception PyCMSError:
+ """
+
+ try:
+ if inPlace:
+ transform.apply_in_place(im)
+ imOut = None
+ else:
+ imOut = transform.apply(im)
+ except (TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+ return imOut
+
+
+def createProfile(colorSpace, colorTemp=-1):
+ """
+ (pyCMS) Creates a profile.
+
+ If colorSpace not in ``["LAB", "XYZ", "sRGB"]``,
+ a :exc:`PyCMSError` is raised.
+
+ If using LAB and ``colorTemp`` is not a positive integer,
+ a :exc:`PyCMSError` is raised.
+
+ If an error occurs while creating the profile,
+ a :exc:`PyCMSError` is raised.
+
+ Use this function to create common profiles on-the-fly instead of
+ having to supply a profile on disk and knowing the path to it. It
+ returns a normal CmsProfile object that can be passed to
+ ImageCms.buildTransformFromOpenProfiles() to create a transform to apply
+ to images.
+
+ :param colorSpace: String, the color space of the profile you wish to
+ create.
+ Currently only "LAB", "XYZ", and "sRGB" are supported.
+ :param colorTemp: Positive integer for the white point for the profile, in
+ degrees Kelvin (i.e. 5000, 6500, 9600, etc.). The default is for D50
+ illuminant if omitted (5000k). colorTemp is ONLY applied to LAB
+ profiles, and is ignored for XYZ and sRGB.
+ :returns: A CmsProfile class object
+ :exception PyCMSError:
+ """
+
+ if colorSpace not in ["LAB", "XYZ", "sRGB"]:
+ msg = (
+ f"Color space not supported for on-the-fly profile creation ({colorSpace})"
+ )
+ raise PyCMSError(msg)
+
+ if colorSpace == "LAB":
+ try:
+ colorTemp = float(colorTemp)
+ except (TypeError, ValueError) as e:
+ msg = f'Color temperature must be numeric, "{colorTemp}" not valid'
+ raise PyCMSError(msg) from e
+
+ try:
+ return core.createProfile(colorSpace, colorTemp)
+ except (TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def getProfileName(profile):
+ """
+
+ (pyCMS) Gets the internal product name for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile,
+ a :exc:`PyCMSError` is raised If an error occurs while trying
+ to obtain the name tag, a :exc:`PyCMSError` is raised.
+
+ Use this function to obtain the INTERNAL name of the profile (stored
+ in an ICC tag in the profile itself), usually the one used when the
+ profile was originally created. Sometimes this tag also contains
+ additional information supplied by the creator.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: A string containing the internal name of the profile as stored
+ in an ICC tag.
+ :exception PyCMSError:
+ """
+
+ try:
+ # add an extra newline to preserve pyCMS compatibility
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ # do it in python, not c.
+ # // name was "%s - %s" (model, manufacturer) || Description ,
+ # // but if the Model and Manufacturer were the same or the model
+ # // was long, Just the model, in 1.x
+ model = profile.profile.model
+ manufacturer = profile.profile.manufacturer
+
+ if not (model or manufacturer):
+ return (profile.profile.profile_description or "") + "\n"
+ if not manufacturer or len(model) > 30:
+ return model + "\n"
+ return f"{model} - {manufacturer}\n"
+
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def getProfileInfo(profile):
+ """
+ (pyCMS) Gets the internal product information for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile,
+ a :exc:`PyCMSError` is raised.
+
+ If an error occurs while trying to obtain the info tag,
+ a :exc:`PyCMSError` is raised.
+
+ Use this function to obtain the information stored in the profile's
+ info tag. This often contains details about the profile, and how it
+ was created, as supplied by the creator.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: A string containing the internal profile information stored in
+ an ICC tag.
+ :exception PyCMSError:
+ """
+
+ try:
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ # add an extra newline to preserve pyCMS compatibility
+ # Python, not C. the white point bits weren't working well,
+ # so skipping.
+ # info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint
+ description = profile.profile.profile_description
+ cpright = profile.profile.copyright
+ elements = [element for element in (description, cpright) if element]
+ return "\r\n\r\n".join(elements) + "\r\n\r\n"
+
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def getProfileCopyright(profile):
+ """
+ (pyCMS) Gets the copyright for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile, a
+ :exc:`PyCMSError` is raised.
+
+ If an error occurs while trying to obtain the copyright tag,
+ a :exc:`PyCMSError` is raised.
+
+ Use this function to obtain the information stored in the profile's
+ copyright tag.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: A string containing the internal profile information stored in
+ an ICC tag.
+ :exception PyCMSError:
+ """
+ try:
+ # add an extra newline to preserve pyCMS compatibility
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ return (profile.profile.copyright or "") + "\n"
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def getProfileManufacturer(profile):
+ """
+ (pyCMS) Gets the manufacturer for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile, a
+ :exc:`PyCMSError` is raised.
+
+ If an error occurs while trying to obtain the manufacturer tag, a
+ :exc:`PyCMSError` is raised.
+
+ Use this function to obtain the information stored in the profile's
+ manufacturer tag.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: A string containing the internal profile information stored in
+ an ICC tag.
+ :exception PyCMSError:
+ """
+ try:
+ # add an extra newline to preserve pyCMS compatibility
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ return (profile.profile.manufacturer or "") + "\n"
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def getProfileModel(profile):
+ """
+ (pyCMS) Gets the model for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile, a
+ :exc:`PyCMSError` is raised.
+
+ If an error occurs while trying to obtain the model tag,
+ a :exc:`PyCMSError` is raised.
+
+ Use this function to obtain the information stored in the profile's
+ model tag.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: A string containing the internal profile information stored in
+ an ICC tag.
+ :exception PyCMSError:
+ """
+
+ try:
+ # add an extra newline to preserve pyCMS compatibility
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ return (profile.profile.model or "") + "\n"
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def getProfileDescription(profile):
+ """
+ (pyCMS) Gets the description for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile, a
+ :exc:`PyCMSError` is raised.
+
+ If an error occurs while trying to obtain the description tag,
+ a :exc:`PyCMSError` is raised.
+
+ Use this function to obtain the information stored in the profile's
+ description tag.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: A string containing the internal profile information stored in an
+ ICC tag.
+ :exception PyCMSError:
+ """
+
+ try:
+ # add an extra newline to preserve pyCMS compatibility
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ return (profile.profile.profile_description or "") + "\n"
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def getDefaultIntent(profile):
+ """
+ (pyCMS) Gets the default intent name for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile, a
+ :exc:`PyCMSError` is raised.
+
+ If an error occurs while trying to obtain the default intent, a
+ :exc:`PyCMSError` is raised.
+
+ Use this function to determine the default (and usually best optimized)
+ rendering intent for this profile. Most profiles support multiple
+ rendering intents, but are intended mostly for one type of conversion.
+ If you wish to use a different intent than returned, use
+ ImageCms.isIntentSupported() to verify it will work first.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: Integer 0-3 specifying the default rendering intent for this
+ profile.
+
+ ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT)
+ ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+ ImageCms.Intent.SATURATION = 2
+ ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+ see the pyCMS documentation for details on rendering intents and what
+ they do.
+ :exception PyCMSError:
+ """
+
+ try:
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ return profile.profile.rendering_intent
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def isIntentSupported(profile, intent, direction):
+ """
+ (pyCMS) Checks if a given intent is supported.
+
+ Use this function to verify that you can use your desired
+ ``intent`` with ``profile``, and that ``profile`` can be used for the
+ input/output/proof profile as you desire.
+
+ Some profiles are created specifically for one "direction", can cannot
+ be used for others. Some profiles can only be used for certain
+ rendering intents, so it's best to either verify this before trying
+ to create a transform with them (using this function), or catch the
+ potential :exc:`PyCMSError` that will occur if they don't
+ support the modes you select.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :param intent: Integer (0-3) specifying the rendering intent you wish to
+ use with this profile
+
+ ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT)
+ ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+ ImageCms.Intent.SATURATION = 2
+ ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+ see the pyCMS documentation for details on rendering intents and what
+ they do.
+ :param direction: Integer specifying if the profile is to be used for
+ input, output, or proof
+
+ INPUT = 0 (or use ImageCms.Direction.INPUT)
+ OUTPUT = 1 (or use ImageCms.Direction.OUTPUT)
+ PROOF = 2 (or use ImageCms.Direction.PROOF)
+
+ :returns: 1 if the intent/direction are supported, -1 if they are not.
+ :exception PyCMSError:
+ """
+
+ try:
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ # FIXME: I get different results for the same data w. different
+ # compilers. Bug in LittleCMS or in the binding?
+ if profile.profile.is_intent_supported(intent, direction):
+ return 1
+ else:
+ return -1
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def versions():
+ """
+ (pyCMS) Fetches versions.
+ """
+
+ return VERSION, core.littlecms_version, sys.version.split()[0], Image.__version__
diff --git a/Lib/site-packages/PIL/ImageColor.py b/Lib/site-packages/PIL/ImageColor.py
new file mode 100644
index 0000000..ad59b06
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageColor.py
@@ -0,0 +1,317 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# map CSS3-style colour description strings to RGB
+#
+# History:
+# 2002-10-24 fl Added support for CSS-style color strings
+# 2002-12-15 fl Added RGBA support
+# 2004-03-27 fl Fixed remaining int() problems for Python 1.5.2
+# 2004-07-19 fl Fixed gray/grey spelling issues
+# 2009-03-05 fl Fixed rounding error in grayscale calculation
+#
+# Copyright (c) 2002-2004 by Secret Labs AB
+# Copyright (c) 2002-2004 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import re
+from functools import lru_cache
+
+from . import Image
+
+
+@lru_cache
+def getrgb(color):
+ """
+ Convert a color string to an RGB or RGBA tuple. If the string cannot be
+ parsed, this function raises a :py:exc:`ValueError` exception.
+
+ .. versionadded:: 1.1.4
+
+ :param color: A color string
+ :return: ``(red, green, blue[, alpha])``
+ """
+ if len(color) > 100:
+ msg = "color specifier is too long"
+ raise ValueError(msg)
+ color = color.lower()
+
+ rgb = colormap.get(color, None)
+ if rgb:
+ if isinstance(rgb, tuple):
+ return rgb
+ colormap[color] = rgb = getrgb(rgb)
+ return rgb
+
+ # check for known string formats
+ if re.match("#[a-f0-9]{3}$", color):
+ return int(color[1] * 2, 16), int(color[2] * 2, 16), int(color[3] * 2, 16)
+
+ if re.match("#[a-f0-9]{4}$", color):
+ return (
+ int(color[1] * 2, 16),
+ int(color[2] * 2, 16),
+ int(color[3] * 2, 16),
+ int(color[4] * 2, 16),
+ )
+
+ if re.match("#[a-f0-9]{6}$", color):
+ return int(color[1:3], 16), int(color[3:5], 16), int(color[5:7], 16)
+
+ if re.match("#[a-f0-9]{8}$", color):
+ return (
+ int(color[1:3], 16),
+ int(color[3:5], 16),
+ int(color[5:7], 16),
+ int(color[7:9], 16),
+ )
+
+ m = re.match(r"rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color)
+ if m:
+ return int(m.group(1)), int(m.group(2)), int(m.group(3))
+
+ m = re.match(r"rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color)
+ if m:
+ return (
+ int((int(m.group(1)) * 255) / 100.0 + 0.5),
+ int((int(m.group(2)) * 255) / 100.0 + 0.5),
+ int((int(m.group(3)) * 255) / 100.0 + 0.5),
+ )
+
+ m = re.match(
+ r"hsl\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color
+ )
+ if m:
+ from colorsys import hls_to_rgb
+
+ rgb = hls_to_rgb(
+ float(m.group(1)) / 360.0,
+ float(m.group(3)) / 100.0,
+ float(m.group(2)) / 100.0,
+ )
+ return (
+ int(rgb[0] * 255 + 0.5),
+ int(rgb[1] * 255 + 0.5),
+ int(rgb[2] * 255 + 0.5),
+ )
+
+ m = re.match(
+ r"hs[bv]\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color
+ )
+ if m:
+ from colorsys import hsv_to_rgb
+
+ rgb = hsv_to_rgb(
+ float(m.group(1)) / 360.0,
+ float(m.group(2)) / 100.0,
+ float(m.group(3)) / 100.0,
+ )
+ return (
+ int(rgb[0] * 255 + 0.5),
+ int(rgb[1] * 255 + 0.5),
+ int(rgb[2] * 255 + 0.5),
+ )
+
+ m = re.match(r"rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color)
+ if m:
+ return int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4))
+ msg = f"unknown color specifier: {repr(color)}"
+ raise ValueError(msg)
+
+
+@lru_cache
+def getcolor(color, mode):
+ """
+ Same as :py:func:`~PIL.ImageColor.getrgb` for most modes. However, if
+ ``mode`` is HSV, converts the RGB value to a HSV value, or if ``mode`` is
+ not color or a palette image, converts the RGB value to a grayscale value.
+ If the string cannot be parsed, this function raises a :py:exc:`ValueError`
+ exception.
+
+ .. versionadded:: 1.1.4
+
+ :param color: A color string
+ :param mode: Convert result to this mode
+ :return: ``(graylevel[, alpha]) or (red, green, blue[, alpha])``
+ """
+ # same as getrgb, but converts the result to the given mode
+ color, alpha = getrgb(color), 255
+ if len(color) == 4:
+ color, alpha = color[:3], color[3]
+
+ if mode == "HSV":
+ from colorsys import rgb_to_hsv
+
+ r, g, b = color
+ h, s, v = rgb_to_hsv(r / 255, g / 255, b / 255)
+ return int(h * 255), int(s * 255), int(v * 255)
+ elif Image.getmodebase(mode) == "L":
+ r, g, b = color
+ # ITU-R Recommendation 601-2 for nonlinear RGB
+ # scaled to 24 bits to match the convert's implementation.
+ color = (r * 19595 + g * 38470 + b * 7471 + 0x8000) >> 16
+ if mode[-1] == "A":
+ return color, alpha
+ else:
+ if mode[-1] == "A":
+ return color + (alpha,)
+ return color
+
+
+colormap = {
+ # X11 colour table from https://drafts.csswg.org/css-color-4/, with
+ # gray/grey spelling issues fixed. This is a superset of HTML 4.0
+ # colour names used in CSS 1.
+ "aliceblue": "#f0f8ff",
+ "antiquewhite": "#faebd7",
+ "aqua": "#00ffff",
+ "aquamarine": "#7fffd4",
+ "azure": "#f0ffff",
+ "beige": "#f5f5dc",
+ "bisque": "#ffe4c4",
+ "black": "#000000",
+ "blanchedalmond": "#ffebcd",
+ "blue": "#0000ff",
+ "blueviolet": "#8a2be2",
+ "brown": "#a52a2a",
+ "burlywood": "#deb887",
+ "cadetblue": "#5f9ea0",
+ "chartreuse": "#7fff00",
+ "chocolate": "#d2691e",
+ "coral": "#ff7f50",
+ "cornflowerblue": "#6495ed",
+ "cornsilk": "#fff8dc",
+ "crimson": "#dc143c",
+ "cyan": "#00ffff",
+ "darkblue": "#00008b",
+ "darkcyan": "#008b8b",
+ "darkgoldenrod": "#b8860b",
+ "darkgray": "#a9a9a9",
+ "darkgrey": "#a9a9a9",
+ "darkgreen": "#006400",
+ "darkkhaki": "#bdb76b",
+ "darkmagenta": "#8b008b",
+ "darkolivegreen": "#556b2f",
+ "darkorange": "#ff8c00",
+ "darkorchid": "#9932cc",
+ "darkred": "#8b0000",
+ "darksalmon": "#e9967a",
+ "darkseagreen": "#8fbc8f",
+ "darkslateblue": "#483d8b",
+ "darkslategray": "#2f4f4f",
+ "darkslategrey": "#2f4f4f",
+ "darkturquoise": "#00ced1",
+ "darkviolet": "#9400d3",
+ "deeppink": "#ff1493",
+ "deepskyblue": "#00bfff",
+ "dimgray": "#696969",
+ "dimgrey": "#696969",
+ "dodgerblue": "#1e90ff",
+ "firebrick": "#b22222",
+ "floralwhite": "#fffaf0",
+ "forestgreen": "#228b22",
+ "fuchsia": "#ff00ff",
+ "gainsboro": "#dcdcdc",
+ "ghostwhite": "#f8f8ff",
+ "gold": "#ffd700",
+ "goldenrod": "#daa520",
+ "gray": "#808080",
+ "grey": "#808080",
+ "green": "#008000",
+ "greenyellow": "#adff2f",
+ "honeydew": "#f0fff0",
+ "hotpink": "#ff69b4",
+ "indianred": "#cd5c5c",
+ "indigo": "#4b0082",
+ "ivory": "#fffff0",
+ "khaki": "#f0e68c",
+ "lavender": "#e6e6fa",
+ "lavenderblush": "#fff0f5",
+ "lawngreen": "#7cfc00",
+ "lemonchiffon": "#fffacd",
+ "lightblue": "#add8e6",
+ "lightcoral": "#f08080",
+ "lightcyan": "#e0ffff",
+ "lightgoldenrodyellow": "#fafad2",
+ "lightgreen": "#90ee90",
+ "lightgray": "#d3d3d3",
+ "lightgrey": "#d3d3d3",
+ "lightpink": "#ffb6c1",
+ "lightsalmon": "#ffa07a",
+ "lightseagreen": "#20b2aa",
+ "lightskyblue": "#87cefa",
+ "lightslategray": "#778899",
+ "lightslategrey": "#778899",
+ "lightsteelblue": "#b0c4de",
+ "lightyellow": "#ffffe0",
+ "lime": "#00ff00",
+ "limegreen": "#32cd32",
+ "linen": "#faf0e6",
+ "magenta": "#ff00ff",
+ "maroon": "#800000",
+ "mediumaquamarine": "#66cdaa",
+ "mediumblue": "#0000cd",
+ "mediumorchid": "#ba55d3",
+ "mediumpurple": "#9370db",
+ "mediumseagreen": "#3cb371",
+ "mediumslateblue": "#7b68ee",
+ "mediumspringgreen": "#00fa9a",
+ "mediumturquoise": "#48d1cc",
+ "mediumvioletred": "#c71585",
+ "midnightblue": "#191970",
+ "mintcream": "#f5fffa",
+ "mistyrose": "#ffe4e1",
+ "moccasin": "#ffe4b5",
+ "navajowhite": "#ffdead",
+ "navy": "#000080",
+ "oldlace": "#fdf5e6",
+ "olive": "#808000",
+ "olivedrab": "#6b8e23",
+ "orange": "#ffa500",
+ "orangered": "#ff4500",
+ "orchid": "#da70d6",
+ "palegoldenrod": "#eee8aa",
+ "palegreen": "#98fb98",
+ "paleturquoise": "#afeeee",
+ "palevioletred": "#db7093",
+ "papayawhip": "#ffefd5",
+ "peachpuff": "#ffdab9",
+ "peru": "#cd853f",
+ "pink": "#ffc0cb",
+ "plum": "#dda0dd",
+ "powderblue": "#b0e0e6",
+ "purple": "#800080",
+ "rebeccapurple": "#663399",
+ "red": "#ff0000",
+ "rosybrown": "#bc8f8f",
+ "royalblue": "#4169e1",
+ "saddlebrown": "#8b4513",
+ "salmon": "#fa8072",
+ "sandybrown": "#f4a460",
+ "seagreen": "#2e8b57",
+ "seashell": "#fff5ee",
+ "sienna": "#a0522d",
+ "silver": "#c0c0c0",
+ "skyblue": "#87ceeb",
+ "slateblue": "#6a5acd",
+ "slategray": "#708090",
+ "slategrey": "#708090",
+ "snow": "#fffafa",
+ "springgreen": "#00ff7f",
+ "steelblue": "#4682b4",
+ "tan": "#d2b48c",
+ "teal": "#008080",
+ "thistle": "#d8bfd8",
+ "tomato": "#ff6347",
+ "turquoise": "#40e0d0",
+ "violet": "#ee82ee",
+ "wheat": "#f5deb3",
+ "white": "#ffffff",
+ "whitesmoke": "#f5f5f5",
+ "yellow": "#ffff00",
+ "yellowgreen": "#9acd32",
+}
diff --git a/Lib/site-packages/PIL/ImageDraw.py b/Lib/site-packages/PIL/ImageDraw.py
new file mode 100644
index 0000000..84665f5
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageDraw.py
@@ -0,0 +1,1065 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# drawing interface operations
+#
+# History:
+# 1996-04-13 fl Created (experimental)
+# 1996-08-07 fl Filled polygons, ellipses.
+# 1996-08-13 fl Added text support
+# 1998-06-28 fl Handle I and F images
+# 1998-12-29 fl Added arc; use arc primitive to draw ellipses
+# 1999-01-10 fl Added shape stuff (experimental)
+# 1999-02-06 fl Added bitmap support
+# 1999-02-11 fl Changed all primitives to take options
+# 1999-02-20 fl Fixed backwards compatibility
+# 2000-10-12 fl Copy on write, when necessary
+# 2001-02-18 fl Use default ink for bitmap/text also in fill mode
+# 2002-10-24 fl Added support for CSS-style color strings
+# 2002-12-10 fl Added experimental support for RGBA-on-RGB drawing
+# 2002-12-11 fl Refactored low-level drawing API (work in progress)
+# 2004-08-26 fl Made Draw() a factory function, added getdraw() support
+# 2004-09-04 fl Added width support to line primitive
+# 2004-09-10 fl Added font mode handling
+# 2006-06-19 fl Added font bearing support (getmask2)
+#
+# Copyright (c) 1997-2006 by Secret Labs AB
+# Copyright (c) 1996-2006 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import math
+import numbers
+import struct
+
+from . import Image, ImageColor
+
+"""
+A simple 2D drawing interface for PIL images.
+
+Application code should use the Draw factory, instead of
+directly.
+"""
+
+
+class ImageDraw:
+ font = None
+
+ def __init__(self, im, mode=None):
+ """
+ Create a drawing instance.
+
+ :param im: The image to draw in.
+ :param mode: Optional mode to use for color values. For RGB
+ images, this argument can be RGB or RGBA (to blend the
+ drawing into the image). For all other modes, this argument
+ must be the same as the image mode. If omitted, the mode
+ defaults to the mode of the image.
+ """
+ im.load()
+ if im.readonly:
+ im._copy() # make it writeable
+ blend = 0
+ if mode is None:
+ mode = im.mode
+ if mode != im.mode:
+ if mode == "RGBA" and im.mode == "RGB":
+ blend = 1
+ else:
+ msg = "mode mismatch"
+ raise ValueError(msg)
+ if mode == "P":
+ self.palette = im.palette
+ else:
+ self.palette = None
+ self._image = im
+ self.im = im.im
+ self.draw = Image.core.draw(self.im, blend)
+ self.mode = mode
+ if mode in ("I", "F"):
+ self.ink = self.draw.draw_ink(1)
+ else:
+ self.ink = self.draw.draw_ink(-1)
+ if mode in ("1", "P", "I", "F"):
+ # FIXME: fix Fill2 to properly support matte for I+F images
+ self.fontmode = "1"
+ else:
+ self.fontmode = "L" # aliasing is okay for other modes
+ self.fill = False
+
+ def getfont(self):
+ """
+ Get the current default font.
+
+ To set the default font for this ImageDraw instance::
+
+ from PIL import ImageDraw, ImageFont
+ draw.font = ImageFont.truetype("Tests/fonts/FreeMono.ttf")
+
+ To set the default font for all future ImageDraw instances::
+
+ from PIL import ImageDraw, ImageFont
+ ImageDraw.ImageDraw.font = ImageFont.truetype("Tests/fonts/FreeMono.ttf")
+
+ If the current default font is ``None``,
+ it is initialized with ``ImageFont.load_default()``.
+
+ :returns: An image font."""
+ if not self.font:
+ # FIXME: should add a font repository
+ from . import ImageFont
+
+ self.font = ImageFont.load_default()
+ return self.font
+
+ def _getfont(self, font_size):
+ if font_size is not None:
+ from . import ImageFont
+
+ font = ImageFont.load_default(font_size)
+ else:
+ font = self.getfont()
+ return font
+
+ def _getink(self, ink, fill=None):
+ if ink is None and fill is None:
+ if self.fill:
+ fill = self.ink
+ else:
+ ink = self.ink
+ else:
+ if ink is not None:
+ if isinstance(ink, str):
+ ink = ImageColor.getcolor(ink, self.mode)
+ if self.palette and not isinstance(ink, numbers.Number):
+ ink = self.palette.getcolor(ink, self._image)
+ ink = self.draw.draw_ink(ink)
+ if fill is not None:
+ if isinstance(fill, str):
+ fill = ImageColor.getcolor(fill, self.mode)
+ if self.palette and not isinstance(fill, numbers.Number):
+ fill = self.palette.getcolor(fill, self._image)
+ fill = self.draw.draw_ink(fill)
+ return ink, fill
+
+ def arc(self, xy, start, end, fill=None, width=1):
+ """Draw an arc."""
+ ink, fill = self._getink(fill)
+ if ink is not None:
+ self.draw.draw_arc(xy, start, end, ink, width)
+
+ def bitmap(self, xy, bitmap, fill=None):
+ """Draw a bitmap."""
+ bitmap.load()
+ ink, fill = self._getink(fill)
+ if ink is None:
+ ink = fill
+ if ink is not None:
+ self.draw.draw_bitmap(xy, bitmap.im, ink)
+
+ def chord(self, xy, start, end, fill=None, outline=None, width=1):
+ """Draw a chord."""
+ ink, fill = self._getink(outline, fill)
+ if fill is not None:
+ self.draw.draw_chord(xy, start, end, fill, 1)
+ if ink is not None and ink != fill and width != 0:
+ self.draw.draw_chord(xy, start, end, ink, 0, width)
+
+ def ellipse(self, xy, fill=None, outline=None, width=1):
+ """Draw an ellipse."""
+ ink, fill = self._getink(outline, fill)
+ if fill is not None:
+ self.draw.draw_ellipse(xy, fill, 1)
+ if ink is not None and ink != fill and width != 0:
+ self.draw.draw_ellipse(xy, ink, 0, width)
+
+ def line(self, xy, fill=None, width=0, joint=None):
+ """Draw a line, or a connected sequence of line segments."""
+ ink = self._getink(fill)[0]
+ if ink is not None:
+ self.draw.draw_lines(xy, ink, width)
+ if joint == "curve" and width > 4:
+ if not isinstance(xy[0], (list, tuple)):
+ xy = [tuple(xy[i : i + 2]) for i in range(0, len(xy), 2)]
+ for i in range(1, len(xy) - 1):
+ point = xy[i]
+ angles = [
+ math.degrees(math.atan2(end[0] - start[0], start[1] - end[1]))
+ % 360
+ for start, end in ((xy[i - 1], point), (point, xy[i + 1]))
+ ]
+ if angles[0] == angles[1]:
+ # This is a straight line, so no joint is required
+ continue
+
+ def coord_at_angle(coord, angle):
+ x, y = coord
+ angle -= 90
+ distance = width / 2 - 1
+ return tuple(
+ p + (math.floor(p_d) if p_d > 0 else math.ceil(p_d))
+ for p, p_d in (
+ (x, distance * math.cos(math.radians(angle))),
+ (y, distance * math.sin(math.radians(angle))),
+ )
+ )
+
+ flipped = (
+ angles[1] > angles[0] and angles[1] - 180 > angles[0]
+ ) or (angles[1] < angles[0] and angles[1] + 180 > angles[0])
+ coords = [
+ (point[0] - width / 2 + 1, point[1] - width / 2 + 1),
+ (point[0] + width / 2 - 1, point[1] + width / 2 - 1),
+ ]
+ if flipped:
+ start, end = (angles[1] + 90, angles[0] + 90)
+ else:
+ start, end = (angles[0] - 90, angles[1] - 90)
+ self.pieslice(coords, start - 90, end - 90, fill)
+
+ if width > 8:
+ # Cover potential gaps between the line and the joint
+ if flipped:
+ gap_coords = [
+ coord_at_angle(point, angles[0] + 90),
+ point,
+ coord_at_angle(point, angles[1] + 90),
+ ]
+ else:
+ gap_coords = [
+ coord_at_angle(point, angles[0] - 90),
+ point,
+ coord_at_angle(point, angles[1] - 90),
+ ]
+ self.line(gap_coords, fill, width=3)
+
+ def shape(self, shape, fill=None, outline=None):
+ """(Experimental) Draw a shape."""
+ shape.close()
+ ink, fill = self._getink(outline, fill)
+ if fill is not None:
+ self.draw.draw_outline(shape, fill, 1)
+ if ink is not None and ink != fill:
+ self.draw.draw_outline(shape, ink, 0)
+
+ def pieslice(self, xy, start, end, fill=None, outline=None, width=1):
+ """Draw a pieslice."""
+ ink, fill = self._getink(outline, fill)
+ if fill is not None:
+ self.draw.draw_pieslice(xy, start, end, fill, 1)
+ if ink is not None and ink != fill and width != 0:
+ self.draw.draw_pieslice(xy, start, end, ink, 0, width)
+
+ def point(self, xy, fill=None):
+ """Draw one or more individual pixels."""
+ ink, fill = self._getink(fill)
+ if ink is not None:
+ self.draw.draw_points(xy, ink)
+
+ def polygon(self, xy, fill=None, outline=None, width=1):
+ """Draw a polygon."""
+ ink, fill = self._getink(outline, fill)
+ if fill is not None:
+ self.draw.draw_polygon(xy, fill, 1)
+ if ink is not None and ink != fill and width != 0:
+ if width == 1:
+ self.draw.draw_polygon(xy, ink, 0, width)
+ else:
+ # To avoid expanding the polygon outwards,
+ # use the fill as a mask
+ mask = Image.new("1", self.im.size)
+ mask_ink = self._getink(1)[0]
+
+ fill_im = mask.copy()
+ draw = Draw(fill_im)
+ draw.draw.draw_polygon(xy, mask_ink, 1)
+
+ ink_im = mask.copy()
+ draw = Draw(ink_im)
+ width = width * 2 - 1
+ draw.draw.draw_polygon(xy, mask_ink, 0, width)
+
+ mask.paste(ink_im, mask=fill_im)
+
+ im = Image.new(self.mode, self.im.size)
+ draw = Draw(im)
+ draw.draw.draw_polygon(xy, ink, 0, width)
+ self.im.paste(im.im, (0, 0) + im.size, mask.im)
+
+ def regular_polygon(
+ self, bounding_circle, n_sides, rotation=0, fill=None, outline=None, width=1
+ ):
+ """Draw a regular polygon."""
+ xy = _compute_regular_polygon_vertices(bounding_circle, n_sides, rotation)
+ self.polygon(xy, fill, outline, width)
+
+ def rectangle(self, xy, fill=None, outline=None, width=1):
+ """Draw a rectangle."""
+ ink, fill = self._getink(outline, fill)
+ if fill is not None:
+ self.draw.draw_rectangle(xy, fill, 1)
+ if ink is not None and ink != fill and width != 0:
+ self.draw.draw_rectangle(xy, ink, 0, width)
+
+ def rounded_rectangle(
+ self, xy, radius=0, fill=None, outline=None, width=1, *, corners=None
+ ):
+ """Draw a rounded rectangle."""
+ if isinstance(xy[0], (list, tuple)):
+ (x0, y0), (x1, y1) = xy
+ else:
+ x0, y0, x1, y1 = xy
+ if x1 < x0:
+ msg = "x1 must be greater than or equal to x0"
+ raise ValueError(msg)
+ if y1 < y0:
+ msg = "y1 must be greater than or equal to y0"
+ raise ValueError(msg)
+ if corners is None:
+ corners = (True, True, True, True)
+
+ d = radius * 2
+
+ full_x, full_y = False, False
+ if all(corners):
+ full_x = d >= x1 - x0 - 1
+ if full_x:
+ # The two left and two right corners are joined
+ d = x1 - x0
+ full_y = d >= y1 - y0 - 1
+ if full_y:
+ # The two top and two bottom corners are joined
+ d = y1 - y0
+ if full_x and full_y:
+ # If all corners are joined, that is a circle
+ return self.ellipse(xy, fill, outline, width)
+
+ if d == 0 or not any(corners):
+ # If the corners have no curve,
+ # or there are no corners,
+ # that is a rectangle
+ return self.rectangle(xy, fill, outline, width)
+
+ r = d // 2
+ ink, fill = self._getink(outline, fill)
+
+ def draw_corners(pieslice):
+ if full_x:
+ # Draw top and bottom halves
+ parts = (
+ ((x0, y0, x0 + d, y0 + d), 180, 360),
+ ((x0, y1 - d, x0 + d, y1), 0, 180),
+ )
+ elif full_y:
+ # Draw left and right halves
+ parts = (
+ ((x0, y0, x0 + d, y0 + d), 90, 270),
+ ((x1 - d, y0, x1, y0 + d), 270, 90),
+ )
+ else:
+ # Draw four separate corners
+ parts = []
+ for i, part in enumerate(
+ (
+ ((x0, y0, x0 + d, y0 + d), 180, 270),
+ ((x1 - d, y0, x1, y0 + d), 270, 360),
+ ((x1 - d, y1 - d, x1, y1), 0, 90),
+ ((x0, y1 - d, x0 + d, y1), 90, 180),
+ )
+ ):
+ if corners[i]:
+ parts.append(part)
+ for part in parts:
+ if pieslice:
+ self.draw.draw_pieslice(*(part + (fill, 1)))
+ else:
+ self.draw.draw_arc(*(part + (ink, width)))
+
+ if fill is not None:
+ draw_corners(True)
+
+ if full_x:
+ self.draw.draw_rectangle((x0, y0 + r + 1, x1, y1 - r - 1), fill, 1)
+ else:
+ self.draw.draw_rectangle((x0 + r + 1, y0, x1 - r - 1, y1), fill, 1)
+ if not full_x and not full_y:
+ left = [x0, y0, x0 + r, y1]
+ if corners[0]:
+ left[1] += r + 1
+ if corners[3]:
+ left[3] -= r + 1
+ self.draw.draw_rectangle(left, fill, 1)
+
+ right = [x1 - r, y0, x1, y1]
+ if corners[1]:
+ right[1] += r + 1
+ if corners[2]:
+ right[3] -= r + 1
+ self.draw.draw_rectangle(right, fill, 1)
+ if ink is not None and ink != fill and width != 0:
+ draw_corners(False)
+
+ if not full_x:
+ top = [x0, y0, x1, y0 + width - 1]
+ if corners[0]:
+ top[0] += r + 1
+ if corners[1]:
+ top[2] -= r + 1
+ self.draw.draw_rectangle(top, ink, 1)
+
+ bottom = [x0, y1 - width + 1, x1, y1]
+ if corners[3]:
+ bottom[0] += r + 1
+ if corners[2]:
+ bottom[2] -= r + 1
+ self.draw.draw_rectangle(bottom, ink, 1)
+ if not full_y:
+ left = [x0, y0, x0 + width - 1, y1]
+ if corners[0]:
+ left[1] += r + 1
+ if corners[3]:
+ left[3] -= r + 1
+ self.draw.draw_rectangle(left, ink, 1)
+
+ right = [x1 - width + 1, y0, x1, y1]
+ if corners[1]:
+ right[1] += r + 1
+ if corners[2]:
+ right[3] -= r + 1
+ self.draw.draw_rectangle(right, ink, 1)
+
+ def _multiline_check(self, text):
+ split_character = "\n" if isinstance(text, str) else b"\n"
+
+ return split_character in text
+
+ def _multiline_split(self, text):
+ split_character = "\n" if isinstance(text, str) else b"\n"
+
+ return text.split(split_character)
+
+ def _multiline_spacing(self, font, spacing, stroke_width):
+ return (
+ self.textbbox((0, 0), "A", font, stroke_width=stroke_width)[3]
+ + stroke_width
+ + spacing
+ )
+
+ def text(
+ self,
+ xy,
+ text,
+ fill=None,
+ font=None,
+ anchor=None,
+ spacing=4,
+ align="left",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ stroke_fill=None,
+ embedded_color=False,
+ *args,
+ **kwargs,
+ ):
+ """Draw text."""
+ if embedded_color and self.mode not in ("RGB", "RGBA"):
+ msg = "Embedded color supported only in RGB and RGBA modes"
+ raise ValueError(msg)
+
+ if font is None:
+ font = self._getfont(kwargs.get("font_size"))
+
+ if self._multiline_check(text):
+ return self.multiline_text(
+ xy,
+ text,
+ fill,
+ font,
+ anchor,
+ spacing,
+ align,
+ direction,
+ features,
+ language,
+ stroke_width,
+ stroke_fill,
+ embedded_color,
+ )
+
+ def getink(fill):
+ ink, fill = self._getink(fill)
+ if ink is None:
+ return fill
+ return ink
+
+ def draw_text(ink, stroke_width=0, stroke_offset=None):
+ mode = self.fontmode
+ if stroke_width == 0 and embedded_color:
+ mode = "RGBA"
+ coord = []
+ start = []
+ for i in range(2):
+ coord.append(int(xy[i]))
+ start.append(math.modf(xy[i])[0])
+ try:
+ mask, offset = font.getmask2(
+ text,
+ mode,
+ direction=direction,
+ features=features,
+ language=language,
+ stroke_width=stroke_width,
+ anchor=anchor,
+ ink=ink,
+ start=start,
+ *args,
+ **kwargs,
+ )
+ coord = coord[0] + offset[0], coord[1] + offset[1]
+ except AttributeError:
+ try:
+ mask = font.getmask(
+ text,
+ mode,
+ direction,
+ features,
+ language,
+ stroke_width,
+ anchor,
+ ink,
+ start=start,
+ *args,
+ **kwargs,
+ )
+ except TypeError:
+ mask = font.getmask(text)
+ if stroke_offset:
+ coord = coord[0] + stroke_offset[0], coord[1] + stroke_offset[1]
+ if mode == "RGBA":
+ # font.getmask2(mode="RGBA") returns color in RGB bands and mask in A
+ # extract mask and set text alpha
+ color, mask = mask, mask.getband(3)
+ ink_alpha = struct.pack("i", ink)[3]
+ color.fillband(3, ink_alpha)
+ x, y = coord
+ self.im.paste(color, (x, y, x + mask.size[0], y + mask.size[1]), mask)
+ else:
+ self.draw.draw_bitmap(coord, mask, ink)
+
+ ink = getink(fill)
+ if ink is not None:
+ stroke_ink = None
+ if stroke_width:
+ stroke_ink = getink(stroke_fill) if stroke_fill is not None else ink
+
+ if stroke_ink is not None:
+ # Draw stroked text
+ draw_text(stroke_ink, stroke_width)
+
+ # Draw normal text
+ draw_text(ink, 0)
+ else:
+ # Only draw normal text
+ draw_text(ink)
+
+ def multiline_text(
+ self,
+ xy,
+ text,
+ fill=None,
+ font=None,
+ anchor=None,
+ spacing=4,
+ align="left",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ stroke_fill=None,
+ embedded_color=False,
+ *,
+ font_size=None,
+ ):
+ if direction == "ttb":
+ msg = "ttb direction is unsupported for multiline text"
+ raise ValueError(msg)
+
+ if anchor is None:
+ anchor = "la"
+ elif len(anchor) != 2:
+ msg = "anchor must be a 2 character string"
+ raise ValueError(msg)
+ elif anchor[1] in "tb":
+ msg = "anchor not supported for multiline text"
+ raise ValueError(msg)
+
+ if font is None:
+ font = self._getfont(font_size)
+
+ widths = []
+ max_width = 0
+ lines = self._multiline_split(text)
+ line_spacing = self._multiline_spacing(font, spacing, stroke_width)
+ for line in lines:
+ line_width = self.textlength(
+ line, font, direction=direction, features=features, language=language
+ )
+ widths.append(line_width)
+ max_width = max(max_width, line_width)
+
+ top = xy[1]
+ if anchor[1] == "m":
+ top -= (len(lines) - 1) * line_spacing / 2.0
+ elif anchor[1] == "d":
+ top -= (len(lines) - 1) * line_spacing
+
+ for idx, line in enumerate(lines):
+ left = xy[0]
+ width_difference = max_width - widths[idx]
+
+ # first align left by anchor
+ if anchor[0] == "m":
+ left -= width_difference / 2.0
+ elif anchor[0] == "r":
+ left -= width_difference
+
+ # then align by align parameter
+ if align == "left":
+ pass
+ elif align == "center":
+ left += width_difference / 2.0
+ elif align == "right":
+ left += width_difference
+ else:
+ msg = 'align must be "left", "center" or "right"'
+ raise ValueError(msg)
+
+ self.text(
+ (left, top),
+ line,
+ fill,
+ font,
+ anchor,
+ direction=direction,
+ features=features,
+ language=language,
+ stroke_width=stroke_width,
+ stroke_fill=stroke_fill,
+ embedded_color=embedded_color,
+ )
+ top += line_spacing
+
+ def textlength(
+ self,
+ text,
+ font=None,
+ direction=None,
+ features=None,
+ language=None,
+ embedded_color=False,
+ *,
+ font_size=None,
+ ):
+ """Get the length of a given string, in pixels with 1/64 precision."""
+ if self._multiline_check(text):
+ msg = "can't measure length of multiline text"
+ raise ValueError(msg)
+ if embedded_color and self.mode not in ("RGB", "RGBA"):
+ msg = "Embedded color supported only in RGB and RGBA modes"
+ raise ValueError(msg)
+
+ if font is None:
+ font = self._getfont(font_size)
+ mode = "RGBA" if embedded_color else self.fontmode
+ return font.getlength(text, mode, direction, features, language)
+
+ def textbbox(
+ self,
+ xy,
+ text,
+ font=None,
+ anchor=None,
+ spacing=4,
+ align="left",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ embedded_color=False,
+ *,
+ font_size=None,
+ ):
+ """Get the bounding box of a given string, in pixels."""
+ if embedded_color and self.mode not in ("RGB", "RGBA"):
+ msg = "Embedded color supported only in RGB and RGBA modes"
+ raise ValueError(msg)
+
+ if font is None:
+ font = self._getfont(font_size)
+
+ if self._multiline_check(text):
+ return self.multiline_textbbox(
+ xy,
+ text,
+ font,
+ anchor,
+ spacing,
+ align,
+ direction,
+ features,
+ language,
+ stroke_width,
+ embedded_color,
+ )
+
+ mode = "RGBA" if embedded_color else self.fontmode
+ bbox = font.getbbox(
+ text, mode, direction, features, language, stroke_width, anchor
+ )
+ return bbox[0] + xy[0], bbox[1] + xy[1], bbox[2] + xy[0], bbox[3] + xy[1]
+
+ def multiline_textbbox(
+ self,
+ xy,
+ text,
+ font=None,
+ anchor=None,
+ spacing=4,
+ align="left",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ embedded_color=False,
+ *,
+ font_size=None,
+ ):
+ if direction == "ttb":
+ msg = "ttb direction is unsupported for multiline text"
+ raise ValueError(msg)
+
+ if anchor is None:
+ anchor = "la"
+ elif len(anchor) != 2:
+ msg = "anchor must be a 2 character string"
+ raise ValueError(msg)
+ elif anchor[1] in "tb":
+ msg = "anchor not supported for multiline text"
+ raise ValueError(msg)
+
+ if font is None:
+ font = self._getfont(font_size)
+
+ widths = []
+ max_width = 0
+ lines = self._multiline_split(text)
+ line_spacing = self._multiline_spacing(font, spacing, stroke_width)
+ for line in lines:
+ line_width = self.textlength(
+ line,
+ font,
+ direction=direction,
+ features=features,
+ language=language,
+ embedded_color=embedded_color,
+ )
+ widths.append(line_width)
+ max_width = max(max_width, line_width)
+
+ top = xy[1]
+ if anchor[1] == "m":
+ top -= (len(lines) - 1) * line_spacing / 2.0
+ elif anchor[1] == "d":
+ top -= (len(lines) - 1) * line_spacing
+
+ bbox = None
+
+ for idx, line in enumerate(lines):
+ left = xy[0]
+ width_difference = max_width - widths[idx]
+
+ # first align left by anchor
+ if anchor[0] == "m":
+ left -= width_difference / 2.0
+ elif anchor[0] == "r":
+ left -= width_difference
+
+ # then align by align parameter
+ if align == "left":
+ pass
+ elif align == "center":
+ left += width_difference / 2.0
+ elif align == "right":
+ left += width_difference
+ else:
+ msg = 'align must be "left", "center" or "right"'
+ raise ValueError(msg)
+
+ bbox_line = self.textbbox(
+ (left, top),
+ line,
+ font,
+ anchor,
+ direction=direction,
+ features=features,
+ language=language,
+ stroke_width=stroke_width,
+ embedded_color=embedded_color,
+ )
+ if bbox is None:
+ bbox = bbox_line
+ else:
+ bbox = (
+ min(bbox[0], bbox_line[0]),
+ min(bbox[1], bbox_line[1]),
+ max(bbox[2], bbox_line[2]),
+ max(bbox[3], bbox_line[3]),
+ )
+
+ top += line_spacing
+
+ if bbox is None:
+ return xy[0], xy[1], xy[0], xy[1]
+ return bbox
+
+
+def Draw(im, mode=None):
+ """
+ A simple 2D drawing interface for PIL images.
+
+ :param im: The image to draw in.
+ :param mode: Optional mode to use for color values. For RGB
+ images, this argument can be RGB or RGBA (to blend the
+ drawing into the image). For all other modes, this argument
+ must be the same as the image mode. If omitted, the mode
+ defaults to the mode of the image.
+ """
+ try:
+ return im.getdraw(mode)
+ except AttributeError:
+ return ImageDraw(im, mode)
+
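+
+# A minimal usage sketch of the factory above (illustrative only; the
+# helper name _draw_demo is not part of this module's API):
+def _draw_demo():
+    # draw a few primitives on a blank canvas
+    im = Image.new("RGB", (120, 80), "white")
+    d = Draw(im)
+    d.rectangle((10, 10, 110, 70), outline="black", width=2)
+    d.line((10, 70, 60, 20, 110, 70), fill="red", width=6, joint="curve")
+    d.ellipse((45, 30, 75, 60), fill="blue")
+    d.text((12, 12), "demo", fill="black")  # uses the default font
+    return im
+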
+
+# experimental access to the outline API
+try:
+ Outline = Image.core.outline
+except AttributeError:
+ Outline = None
+
+
+def getdraw(im=None, hints=None):
+ """
+ (Experimental) A more advanced 2D drawing interface for PIL images,
+ based on the WCK interface.
+
+ :param im: The image to draw in.
+ :param hints: An optional list of hints.
+ :returns: A (drawing context, drawing resource factory) tuple.
+ """
+ # FIXME: this needs more work!
+ # FIXME: come up with a better 'hints' scheme.
+ handler = None
+ if not hints or "nicest" in hints:
+ try:
+ from . import _imagingagg as handler
+ except ImportError:
+ pass
+ if handler is None:
+ from . import ImageDraw2 as handler
+ if im:
+ im = handler.Draw(im)
+ return im, handler
+
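+
+# A sketch of the experimental interface above (illustrative only): the
+# returned handler doubles as a factory for drawing resources such as pens.
+def _getdraw_demo():
+    im = Image.new("RGB", (80, 80), "white")
+    d, handler = getdraw(im)
+    pen = handler.Pen("green", width=2)
+    d.line((0, 0, 79, 79), pen)
+    return d.flush()
+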
+
+def floodfill(image, xy, value, border=None, thresh=0):
+ """
+ (experimental) Fills a bounded region with a given color.
+
+ :param image: Target image.
+ :param xy: Seed position (a 2-item coordinate tuple). See
+ :ref:`coordinate-system`.
+ :param value: Fill color.
+ :param border: Optional border value. If given, the region consists of
+ pixels with a color different from the border color. If not given,
+ the region consists of pixels having the same color as the seed
+ pixel.
+ :param thresh: Optional threshold value which specifies a maximum
+ tolerable difference of a pixel value from the 'background' in
+ order for it to be replaced. Useful for filling regions of
+ non-homogeneous, but similar, colors.
+ """
+ # based on an implementation by Eric S. Raymond
+ # amended by yo1995 @20180806
+ pixel = image.load()
+ x, y = xy
+ try:
+ background = pixel[x, y]
+ if _color_diff(value, background) <= thresh:
+ return # seed point already has fill color
+ pixel[x, y] = value
+ except (ValueError, IndexError):
+ return # seed point outside image
+ edge = {(x, y)}
+ # use a set to keep record of current and previous edge pixels
+ # to reduce memory consumption
+ full_edge = set()
+ while edge:
+ new_edge = set()
+ for x, y in edge: # 4 adjacent method
+ for s, t in ((x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)):
+ # If already processed, or if a coordinate is negative, skip
+ if (s, t) in full_edge or s < 0 or t < 0:
+ continue
+ try:
+ p = pixel[s, t]
+ except (ValueError, IndexError):
+ pass
+ else:
+ full_edge.add((s, t))
+ if border is None:
+ fill = _color_diff(p, background) <= thresh
+ else:
+ fill = p not in (value, border)
+ if fill:
+ pixel[s, t] = value
+ new_edge.add((s, t))
+ full_edge = edge # discard pixels processed
+ edge = new_edge
+
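+
+# A minimal usage sketch of floodfill() (illustrative only):
+def _floodfill_demo():
+    im = Image.new("RGB", (64, 64), "white")
+    d = Draw(im)
+    d.ellipse((8, 8, 56, 56), outline="black")
+    # fill the inside of the circle; thresh tolerates slight color noise
+    floodfill(im, (32, 32), value=(255, 0, 0), thresh=16)
+    return im
+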
+
+def _compute_regular_polygon_vertices(bounding_circle, n_sides, rotation):
+ """
+ Generate a list of vertices for a 2D regular polygon.
+
+ :param bounding_circle: The bounding circle is a tuple defined
+ by a point and radius. The polygon is inscribed in this circle.
+ (e.g. ``bounding_circle=(x, y, r)`` or ``((x, y), r)``)
+ :param n_sides: Number of sides
+ (e.g. ``n_sides=3`` for a triangle, ``6`` for a hexagon)
+ :param rotation: Apply an arbitrary rotation to the polygon
+ (e.g. ``rotation=90``, applies a 90 degree rotation)
+ :return: List of regular polygon vertices
+ (e.g. ``[(25, 50), (50, 50), (50, 25), (25, 25)]``)
+
+ How are the vertices computed?
+ 1. Compute the following variables
+ - theta: Angle between the apothem & the nearest polygon vertex
+ - side_length: Length of each polygon edge
+ - centroid: Center of bounding circle (1st, 2nd elements of bounding_circle)
+ - polygon_radius: Polygon radius (last element of bounding_circle)
+ - angles: Location of each polygon vertex in polar grid
+ (e.g. A square with 0 degree rotation => [225.0, 315.0, 45.0, 135.0])
+
+ 2. For each angle in angles, get the polygon vertex at that angle
+ The vertex is computed using the equation below.
+        X = x * cos(φ) + y * sin(φ)
+        Y = −x * sin(φ) + y * cos(φ)
+
+ Note:
+ φ = angle in degrees
+ x = 0
+ y = polygon_radius
+
+ The formula above assumes rotation around the origin.
+ In our case, we are rotating around the centroid.
+ To account for this, we use the formula below
+        X = x * cos(φ) + y * sin(φ) + centroid_x
+        Y = −x * sin(φ) + y * cos(φ) + centroid_y
+ """
+ # 1. Error Handling
+ # 1.1 Check `n_sides` has an appropriate value
+ if not isinstance(n_sides, int):
+ msg = "n_sides should be an int"
+ raise TypeError(msg)
+ if n_sides < 3:
+ msg = "n_sides should be an int > 2"
+ raise ValueError(msg)
+
+ # 1.2 Check `bounding_circle` has an appropriate value
+ if not isinstance(bounding_circle, (list, tuple)):
+ msg = "bounding_circle should be a tuple"
+ raise TypeError(msg)
+
+ if len(bounding_circle) == 3:
+ *centroid, polygon_radius = bounding_circle
+ elif len(bounding_circle) == 2:
+ centroid, polygon_radius = bounding_circle
+ else:
+ msg = (
+ "bounding_circle should contain 2D coordinates "
+ "and a radius (e.g. (x, y, r) or ((x, y), r) )"
+ )
+ raise ValueError(msg)
+
+ if not all(isinstance(i, (int, float)) for i in (*centroid, polygon_radius)):
+ msg = "bounding_circle should only contain numeric data"
+ raise ValueError(msg)
+
+ if not len(centroid) == 2:
+ msg = "bounding_circle centre should contain 2D coordinates (e.g. (x, y))"
+ raise ValueError(msg)
+
+ if polygon_radius <= 0:
+ msg = "bounding_circle radius should be > 0"
+ raise ValueError(msg)
+
+ # 1.3 Check `rotation` has an appropriate value
+ if not isinstance(rotation, (int, float)):
+ msg = "rotation should be an int or float"
+ raise ValueError(msg)
+
+ # 2. Define Helper Functions
+ def _apply_rotation(point, degrees, centroid):
+ return (
+ round(
+ point[0] * math.cos(math.radians(360 - degrees))
+ - point[1] * math.sin(math.radians(360 - degrees))
+ + centroid[0],
+ 2,
+ ),
+ round(
+ point[1] * math.cos(math.radians(360 - degrees))
+ + point[0] * math.sin(math.radians(360 - degrees))
+ + centroid[1],
+ 2,
+ ),
+ )
+
+ def _compute_polygon_vertex(centroid, polygon_radius, angle):
+ start_point = [polygon_radius, 0]
+ return _apply_rotation(start_point, angle, centroid)
+
+ def _get_angles(n_sides, rotation):
+ angles = []
+ degrees = 360 / n_sides
+ # Start with the bottom left polygon vertex
+ current_angle = (270 - 0.5 * degrees) + rotation
+ for _ in range(0, n_sides):
+ angles.append(current_angle)
+ current_angle += degrees
+ if current_angle > 360:
+ current_angle -= 360
+ return angles
+
+ # 3. Variable Declarations
+ angles = _get_angles(n_sides, rotation)
+
+ # 4. Compute Vertices
+ return [
+ _compute_polygon_vertex(centroid, polygon_radius, angle) for angle in angles
+ ]
+
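+
+# A worked example of the helper above (illustrative only): a hexagon
+# inscribed in a circle of radius 10 centred on (50, 50). The first angle
+# is 270 - 0.5 * 60 = 240 degrees, giving the vertex (45.0, 58.66).
+def _regular_polygon_demo():
+    return _compute_regular_polygon_vertices(((50, 50), 10), 6, rotation=0)
+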
+
+def _color_diff(color1, color2):
+ """
+ Uses 1-norm distance to calculate difference between two values.
+ """
+ if isinstance(color2, tuple):
+ return sum(abs(color1[i] - color2[i]) for i in range(0, len(color2)))
+ else:
+ return abs(color1 - color2)
diff --git a/Lib/site-packages/PIL/ImageDraw2.py b/Lib/site-packages/PIL/ImageDraw2.py
new file mode 100644
index 0000000..35ee583
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageDraw2.py
@@ -0,0 +1,193 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# WCK-style drawing interface operations
+#
+# History:
+# 2003-12-07 fl created
+# 2005-05-15 fl updated; added to PIL as ImageDraw2
+# 2005-05-15 fl added text support
+# 2005-05-20 fl added arc/chord/pieslice support
+#
+# Copyright (c) 2003-2005 by Secret Labs AB
+# Copyright (c) 2003-2005 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+"""
+(Experimental) WCK-style drawing interface operations
+
+.. seealso:: :py:mod:`PIL.ImageDraw`
+"""
+from __future__ import annotations
+
+from . import Image, ImageColor, ImageDraw, ImageFont, ImagePath
+
+
+class Pen:
+ """Stores an outline color and width."""
+
+ def __init__(self, color, width=1, opacity=255):
+ self.color = ImageColor.getrgb(color)
+ self.width = width
+
+
+class Brush:
+ """Stores a fill color"""
+
+ def __init__(self, color, opacity=255):
+ self.color = ImageColor.getrgb(color)
+
+
+class Font:
+ """Stores a TrueType font and color"""
+
+ def __init__(self, color, file, size=12):
+ # FIXME: add support for bitmap fonts
+ self.color = ImageColor.getrgb(color)
+ self.font = ImageFont.truetype(file, size)
+
+
+class Draw:
+ """
+ (Experimental) WCK-style drawing interface
+ """
+
+ def __init__(self, image, size=None, color=None):
+ if not hasattr(image, "im"):
+ image = Image.new(image, size, color)
+ self.draw = ImageDraw.Draw(image)
+ self.image = image
+ self.transform = None
+
+ def flush(self):
+ return self.image
+
+ def render(self, op, xy, pen, brush=None):
+ # handle color arguments
+ outline = fill = None
+ width = 1
+ if isinstance(pen, Pen):
+ outline = pen.color
+ width = pen.width
+ elif isinstance(brush, Pen):
+ outline = brush.color
+ width = brush.width
+ if isinstance(brush, Brush):
+ fill = brush.color
+ elif isinstance(pen, Brush):
+ fill = pen.color
+ # handle transformation
+ if self.transform:
+ xy = ImagePath.Path(xy)
+ xy.transform(self.transform)
+ # render the item
+ if op == "line":
+ self.draw.line(xy, fill=outline, width=width)
+ else:
+ getattr(self.draw, op)(xy, fill=fill, outline=outline)
+
+ def settransform(self, offset):
+ """Sets a transformation offset."""
+ (xoffset, yoffset) = offset
+ self.transform = (1, 0, xoffset, 0, 1, yoffset)
+
+ def arc(self, xy, start, end, *options):
+ """
+ Draws an arc (a portion of a circle outline) between the start and end
+ angles, inside the given bounding box.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.arc`
+ """
+ self.render("arc", xy, start, end, *options)
+
+ def chord(self, xy, start, end, *options):
+ """
+ Same as :py:meth:`~PIL.ImageDraw2.Draw.arc`, but connects the end points
+ with a straight line.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.chord`
+ """
+ self.render("chord", xy, start, end, *options)
+
+ def ellipse(self, xy, *options):
+ """
+ Draws an ellipse inside the given bounding box.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.ellipse`
+ """
+ self.render("ellipse", xy, *options)
+
+ def line(self, xy, *options):
+ """
+ Draws a line between the coordinates in the ``xy`` list.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.line`
+ """
+ self.render("line", xy, *options)
+
+ def pieslice(self, xy, start, end, *options):
+ """
+ Same as arc, but also draws straight lines between the end points and the
+ center of the bounding box.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.pieslice`
+ """
+ self.render("pieslice", xy, start, end, *options)
+
+ def polygon(self, xy, *options):
+ """
+ Draws a polygon.
+
+ The polygon outline consists of straight lines between the given
+ coordinates, plus a straight line between the last and the first
+ coordinate.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.polygon`
+ """
+ self.render("polygon", xy, *options)
+
+ def rectangle(self, xy, *options):
+ """
+ Draws a rectangle.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.rectangle`
+ """
+ self.render("rectangle", xy, *options)
+
+ def text(self, xy, text, font):
+ """
+ Draws the string at the given position.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.text`
+ """
+ if self.transform:
+ xy = ImagePath.Path(xy)
+ xy.transform(self.transform)
+ self.draw.text(xy, text, font=font.font, fill=font.color)
+
+ def textbbox(self, xy, text, font):
+ """
+ Returns bounding box (in pixels) of given text.
+
+ :return: ``(left, top, right, bottom)`` bounding box
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textbbox`
+ """
+ if self.transform:
+ xy = ImagePath.Path(xy)
+ xy.transform(self.transform)
+ return self.draw.textbbox(xy, text, font=font.font)
+
+ def textlength(self, text, font):
+ """
+ Returns length (in pixels) of given text.
+ This is the amount by which following text should be offset.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textlength`
+ """
+ return self.draw.textlength(text, font=font.font)
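+
+
+# A minimal usage sketch of this interface (illustrative only):
+def _imagedraw2_demo():
+    d = Draw("RGB", (100, 100), "white")
+    pen = Pen("blue", width=3)
+    brush = Brush("yellow")
+    d.ellipse((20, 20, 80, 80), pen, brush)
+    return d.flush()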
diff --git a/Lib/site-packages/PIL/ImageEnhance.py b/Lib/site-packages/PIL/ImageEnhance.py
new file mode 100644
index 0000000..93a50d2
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageEnhance.py
@@ -0,0 +1,104 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# image enhancement classes
+#
+# For a background, see "Image Processing By Interpolation and
+# Extrapolation", Paul Haeberli and Douglas Voorhies. Available
+# at http://www.graficaobscura.com/interp/index.html
+#
+# History:
+# 1996-03-23 fl Created
+# 2009-06-16 fl Fixed mean calculation
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from . import Image, ImageFilter, ImageStat
+
+
+class _Enhance:
+ def enhance(self, factor):
+ """
+ Returns an enhanced image.
+
+ :param factor: A floating point value controlling the enhancement.
+ Factor 1.0 always returns a copy of the original image,
+ lower factors mean less color (brightness, contrast,
+ etc), and higher values more. There are no restrictions
+ on this value.
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+ return Image.blend(self.degenerate, self.image, factor)
+
+
+class Color(_Enhance):
+ """Adjust image color balance.
+
+ This class can be used to adjust the colour balance of an image, in
+ a manner similar to the controls on a colour TV set. An enhancement
+ factor of 0.0 gives a black and white image. A factor of 1.0 gives
+ the original image.
+ """
+
+ def __init__(self, image):
+ self.image = image
+ self.intermediate_mode = "L"
+ if "A" in image.getbands():
+ self.intermediate_mode = "LA"
+
+ self.degenerate = image.convert(self.intermediate_mode).convert(image.mode)
+
+
+class Contrast(_Enhance):
+ """Adjust image contrast.
+
+ This class can be used to control the contrast of an image, similar
+ to the contrast control on a TV set. An enhancement factor of 0.0
+ gives a solid gray image. A factor of 1.0 gives the original image.
+ """
+
+ def __init__(self, image):
+ self.image = image
+ mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5)
+ self.degenerate = Image.new("L", image.size, mean).convert(image.mode)
+
+ if "A" in image.getbands():
+ self.degenerate.putalpha(image.getchannel("A"))
+
+
+class Brightness(_Enhance):
+ """Adjust image brightness.
+
+ This class can be used to control the brightness of an image. An
+ enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the
+ original image.
+ """
+
+ def __init__(self, image):
+ self.image = image
+ self.degenerate = Image.new(image.mode, image.size, 0)
+
+ if "A" in image.getbands():
+ self.degenerate.putalpha(image.getchannel("A"))
+
+
+class Sharpness(_Enhance):
+ """Adjust image sharpness.
+
+ This class can be used to adjust the sharpness of an image. An
+ enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the
+ original image, and a factor of 2.0 gives a sharpened image.
+ """
+
+ def __init__(self, image):
+ self.image = image
+ self.degenerate = image.filter(ImageFilter.SMOOTH)
+
+ if "A" in image.getbands():
+ self.degenerate.putalpha(image.getchannel("A"))
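+
+
+# A minimal usage sketch of the enhancer pattern (illustrative only):
+# factor 0.0 yields the degenerate image, 1.0 the original, and values
+# above 1.0 extrapolate past the original.
+def _enhance_demo(image):
+    return Contrast(image).enhance(1.3)  # boost contrast by 30%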
diff --git a/Lib/site-packages/PIL/ImageFile.py b/Lib/site-packages/PIL/ImageFile.py
new file mode 100644
index 0000000..0923979
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageFile.py
@@ -0,0 +1,795 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# base class for image file handlers
+#
+# history:
+# 1995-09-09 fl Created
+# 1996-03-11 fl Fixed load mechanism.
+# 1996-04-15 fl Added pcx/xbm decoders.
+# 1996-04-30 fl Added encoders.
+# 1996-12-14 fl Added load helpers
+# 1997-01-11 fl Use encode_to_file where possible
+# 1997-08-27 fl Flush output in _save
+# 1998-03-05 fl Use memory mapping for some modes
+# 1999-02-04 fl Use memory mapping also for "I;16" and "I;16B"
+# 1999-05-31 fl Added image parser
+# 2000-10-12 fl Set readonly flag on memory-mapped images
+# 2002-03-20 fl Use better messages for common decoder errors
+# 2003-04-21 fl Fall back on mmap/map_buffer if map is not available
+# 2003-10-30 fl Added StubImageFile class
+# 2004-02-25 fl Made incremental parser more robust
+#
+# Copyright (c) 1997-2004 by Secret Labs AB
+# Copyright (c) 1995-2004 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import io
+import itertools
+import struct
+import sys
+from typing import Any, NamedTuple
+
+from . import Image
+from ._deprecate import deprecate
+from ._util import is_path
+
+MAXBLOCK = 65536
+
+SAFEBLOCK = 1024 * 1024
+
+LOAD_TRUNCATED_IMAGES = False
+"""Whether or not to load truncated image files. User code may change this."""
+
+ERRORS = {
+ -1: "image buffer overrun error",
+ -2: "decoding error",
+ -3: "unknown error",
+ -8: "bad configuration",
+ -9: "out of memory error",
+}
+"""
+Dict of known error codes returned from :meth:`.PyDecoder.decode`,
+:meth:`.PyEncoder.encode`, :meth:`.PyEncoder.encode_to_pyfd` and
+:meth:`.PyEncoder.encode_to_file`.
+"""
+
+
+#
+# --------------------------------------------------------------------
+# Helpers
+
+
+def _get_oserror(error, *, encoder):
+ try:
+ msg = Image.core.getcodecstatus(error)
+ except AttributeError:
+ msg = ERRORS.get(error)
+ if not msg:
+ msg = f"{'encoder' if encoder else 'decoder'} error {error}"
+ msg += f" when {'writing' if encoder else 'reading'} image file"
+ return OSError(msg)
+
+
+def raise_oserror(error):
+ deprecate(
+ "raise_oserror",
+ 12,
+ action="It is only useful for translating error codes returned by a codec's "
+ "decode() method, which ImageFile already does automatically.",
+ )
+ raise _get_oserror(error, encoder=False)
+
+
+def _tilesort(t):
+ # sort on offset
+ return t[2]
+
+
+class _Tile(NamedTuple):
+ encoder_name: str
+ extents: tuple[int, int, int, int]
+ offset: int
+ args: tuple[Any, ...] | str | None
+
+
+#
+# --------------------------------------------------------------------
+# ImageFile base class
+
+
+class ImageFile(Image.Image):
+ """Base class for image file format handlers."""
+
+ def __init__(self, fp=None, filename=None):
+ super().__init__()
+
+ self._min_frame = 0
+
+ self.custom_mimetype = None
+
+ self.tile = None
+ """ A list of tile descriptors, or ``None`` """
+
+ self.readonly = 1 # until we know better
+
+ self.decoderconfig = ()
+ self.decodermaxblock = MAXBLOCK
+
+ if is_path(fp):
+ # filename
+ self.fp = open(fp, "rb")
+ self.filename = fp
+ self._exclusive_fp = True
+ else:
+ # stream
+ self.fp = fp
+ self.filename = filename
+ # can be overridden
+ self._exclusive_fp = None
+
+ try:
+ try:
+ self._open()
+ except (
+ IndexError, # end of data
+ TypeError, # end of data (ord)
+ KeyError, # unsupported mode
+ EOFError, # got header but not the first frame
+ struct.error,
+ ) as v:
+ raise SyntaxError(v) from v
+
+ if not self.mode or self.size[0] <= 0 or self.size[1] <= 0:
+ msg = "not identified by this driver"
+ raise SyntaxError(msg)
+ except BaseException:
+            # close the file only if we have opened it in this constructor
+ if self._exclusive_fp:
+ self.fp.close()
+ raise
+
+ def get_format_mimetype(self):
+ if self.custom_mimetype:
+ return self.custom_mimetype
+ if self.format is not None:
+ return Image.MIME.get(self.format.upper())
+
+ def __setstate__(self, state):
+ self.tile = []
+ super().__setstate__(state)
+
+ def verify(self):
+ """Check file integrity"""
+
+ # raise exception if something's wrong. must be called
+ # directly after open, and closes file when finished.
+ if self._exclusive_fp:
+ self.fp.close()
+ self.fp = None
+
+ def load(self):
+ """Load image data based on tile list"""
+
+ if self.tile is None:
+ msg = "cannot load this image"
+ raise OSError(msg)
+
+ pixel = Image.Image.load(self)
+ if not self.tile:
+ return pixel
+
+ self.map = None
+ use_mmap = self.filename and len(self.tile) == 1
+ # As of pypy 2.1.0, memory mapping was failing here.
+ use_mmap = use_mmap and not hasattr(sys, "pypy_version_info")
+
+ readonly = 0
+
+ # look for read/seek overrides
+ try:
+ read = self.load_read
+ # don't use mmap if there are custom read/seek functions
+ use_mmap = False
+ except AttributeError:
+ read = self.fp.read
+
+ try:
+ seek = self.load_seek
+ use_mmap = False
+ except AttributeError:
+ seek = self.fp.seek
+
+ if use_mmap:
+ # try memory mapping
+ decoder_name, extents, offset, args = self.tile[0]
+ if isinstance(args, str):
+ args = (args, 0, 1)
+ if (
+ decoder_name == "raw"
+ and len(args) >= 3
+ and args[0] == self.mode
+ and args[0] in Image._MAPMODES
+ ):
+ try:
+ # use mmap, if possible
+ import mmap
+
+ with open(self.filename) as fp:
+ self.map = mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ)
+ if offset + self.size[1] * args[1] > self.map.size():
+ msg = "buffer is not large enough"
+ raise OSError(msg)
+ self.im = Image.core.map_buffer(
+ self.map, self.size, decoder_name, offset, args
+ )
+ readonly = 1
+ # After trashing self.im,
+ # we might need to reload the palette data.
+ if self.palette:
+ self.palette.dirty = 1
+ except (AttributeError, OSError, ImportError):
+ self.map = None
+
+ self.load_prepare()
+ err_code = -3 # initialize to unknown error
+ if not self.map:
+ # sort tiles in file order
+ self.tile.sort(key=_tilesort)
+
+ try:
+ # FIXME: This is a hack to handle TIFF's JpegTables tag.
+ prefix = self.tile_prefix
+ except AttributeError:
+ prefix = b""
+
+ # Remove consecutive duplicates that only differ by their offset
+ self.tile = [
+ list(tiles)[-1]
+ for _, tiles in itertools.groupby(
+ self.tile, lambda tile: (tile[0], tile[1], tile[3])
+ )
+ ]
+ for decoder_name, extents, offset, args in self.tile:
+ seek(offset)
+ decoder = Image._getdecoder(
+ self.mode, decoder_name, args, self.decoderconfig
+ )
+ try:
+ decoder.setimage(self.im, extents)
+ if decoder.pulls_fd:
+ decoder.setfd(self.fp)
+ err_code = decoder.decode(b"")[1]
+ else:
+ b = prefix
+ while True:
+ try:
+ s = read(self.decodermaxblock)
+ except (IndexError, struct.error) as e:
+ # truncated png/gif
+ if LOAD_TRUNCATED_IMAGES:
+ break
+ else:
+ msg = "image file is truncated"
+ raise OSError(msg) from e
+
+ if not s: # truncated jpeg
+ if LOAD_TRUNCATED_IMAGES:
+ break
+ else:
+ msg = (
+ "image file is truncated "
+ f"({len(b)} bytes not processed)"
+ )
+ raise OSError(msg)
+
+ b = b + s
+ n, err_code = decoder.decode(b)
+ if n < 0:
+ break
+ b = b[n:]
+ finally:
+ # Need to cleanup here to prevent leaks
+ decoder.cleanup()
+
+ self.tile = []
+ self.readonly = readonly
+
+ self.load_end()
+
+ if self._exclusive_fp and self._close_exclusive_fp_after_loading:
+ self.fp.close()
+ self.fp = None
+
+ if not self.map and not LOAD_TRUNCATED_IMAGES and err_code < 0:
+ # still raised if decoder fails to return anything
+ raise _get_oserror(err_code, encoder=False)
+
+ return Image.Image.load(self)
+
+ def load_prepare(self):
+ # create image memory if necessary
+ if not self.im or self.im.mode != self.mode or self.im.size != self.size:
+ self.im = Image.core.new(self.mode, self.size)
+ # create palette (optional)
+ if self.mode == "P":
+ Image.Image.load(self)
+
+ def load_end(self):
+ # may be overridden
+ pass
+
+ # may be defined for contained formats
+ # def load_seek(self, pos):
+ # pass
+
+ # may be defined for blocked formats (e.g. PNG)
+ # def load_read(self, bytes):
+ # pass
+
+ def _seek_check(self, frame):
+ if (
+ frame < self._min_frame
+ # Only check upper limit on frames if additional seek operations
+ # are not required to do so
+ or (
+ not (hasattr(self, "_n_frames") and self._n_frames is None)
+ and frame >= self.n_frames + self._min_frame
+ )
+ ):
+ msg = "attempt to seek outside sequence"
+ raise EOFError(msg)
+
+ return self.tell() != frame
+
+
+class StubImageFile(ImageFile):
+ """
+ Base class for stub image loaders.
+
+ A stub loader is an image loader that can identify files of a
+ certain format, but relies on external code to load the file.
+ """
+
+ def _open(self):
+ msg = "StubImageFile subclass must implement _open"
+ raise NotImplementedError(msg)
+
+ def load(self):
+ loader = self._load()
+ if loader is None:
+ msg = f"cannot find loader for this {self.format} file"
+ raise OSError(msg)
+ image = loader.load(self)
+ assert image is not None
+ # become the other object (!)
+ self.__class__ = image.__class__
+ self.__dict__ = image.__dict__
+ return image.load()
+
+ def _load(self):
+ """(Hook) Find actual image loader."""
+ msg = "StubImageFile subclass must implement _load"
+ raise NotImplementedError(msg)
+
+
+class Parser:
+ """
+ Incremental image parser. This class implements the standard
+ feed/close consumer interface.
+ """
+
+ incremental = None
+ image = None
+ data = None
+ decoder = None
+ offset = 0
+ finished = 0
+
+ def reset(self):
+ """
+ (Consumer) Reset the parser. Note that you can only call this
+ method immediately after you've created a parser; parser
+ instances cannot be reused.
+ """
+ assert self.data is None, "cannot reuse parsers"
+
+ def feed(self, data):
+ """
+ (Consumer) Feed data to the parser.
+
+        :param data: A bytes buffer.
+ :exception OSError: If the parser failed to parse the image file.
+ """
+ # collect data
+
+ if self.finished:
+ return
+
+ if self.data is None:
+ self.data = data
+ else:
+ self.data = self.data + data
+
+ # parse what we have
+ if self.decoder:
+ if self.offset > 0:
+ # skip header
+ skip = min(len(self.data), self.offset)
+ self.data = self.data[skip:]
+ self.offset = self.offset - skip
+ if self.offset > 0 or not self.data:
+ return
+
+ n, e = self.decoder.decode(self.data)
+
+ if n < 0:
+ # end of stream
+ self.data = None
+ self.finished = 1
+ if e < 0:
+ # decoding error
+ self.image = None
+ raise _get_oserror(e, encoder=False)
+ else:
+ # end of image
+ return
+ self.data = self.data[n:]
+
+ elif self.image:
+ # if we end up here with no decoder, this file cannot
+ # be incrementally parsed. wait until we've gotten all
+ # available data
+ pass
+
+ else:
+ # attempt to open this file
+ try:
+ with io.BytesIO(self.data) as fp:
+ im = Image.open(fp)
+ except OSError:
+ pass # not enough data
+ else:
+ flag = hasattr(im, "load_seek") or hasattr(im, "load_read")
+ if flag or len(im.tile) != 1:
+ # custom load code, or multiple tiles
+                    self.decoder = None
+ else:
+ # initialize decoder
+ im.load_prepare()
+ d, e, o, a = im.tile[0]
+ im.tile = []
+ self.decoder = Image._getdecoder(im.mode, d, a, im.decoderconfig)
+ self.decoder.setimage(im.im, e)
+
+ # calculate decoder offset
+ self.offset = o
+ if self.offset <= len(self.data):
+ self.data = self.data[self.offset :]
+ self.offset = 0
+
+ self.image = im
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+ def close(self):
+ """
+ (Consumer) Close the stream.
+
+ :returns: An image object.
+ :exception OSError: If the parser failed to parse the image file either
+ because it cannot be identified or cannot be
+ decoded.
+ """
+ # finish decoding
+ if self.decoder:
+ # get rid of what's left in the buffers
+ self.feed(b"")
+ self.data = self.decoder = None
+ if not self.finished:
+ msg = "image was incomplete"
+ raise OSError(msg)
+ if not self.image:
+ msg = "cannot parse this image"
+ raise OSError(msg)
+ if self.data:
+ # incremental parsing not possible; reopen the file
+            # now that we have all data
+ with io.BytesIO(self.data) as fp:
+ try:
+ self.image = Image.open(fp)
+ finally:
+ self.image.load()
+ return self.image
+
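+
+# A minimal usage sketch of the incremental parser (illustrative only):
+def _parser_demo(path):
+    parser = Parser()
+    with open(path, "rb") as fp:
+        while True:
+            chunk = fp.read(SAFEBLOCK)
+            if not chunk:
+                break
+            parser.feed(chunk)
+    return parser.close()  # returns a PIL.Image.Image
+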
+
+# --------------------------------------------------------------------
+
+
+def _save(im, fp, tile, bufsize=0):
+ """Helper to save image based on tile list
+
+ :param im: Image object.
+ :param fp: File object.
+ :param tile: Tile list.
+ :param bufsize: Optional buffer size
+ """
+
+ im.load()
+ if not hasattr(im, "encoderconfig"):
+ im.encoderconfig = ()
+ tile.sort(key=_tilesort)
+ # FIXME: make MAXBLOCK a configuration parameter
+ # It would be great if we could have the encoder specify what it needs
+ # But, it would need at least the image size in most cases. RawEncode is
+ # a tricky case.
+ bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4) # see RawEncode.c
+ try:
+ fh = fp.fileno()
+ fp.flush()
+ _encode_tile(im, fp, tile, bufsize, fh)
+ except (AttributeError, io.UnsupportedOperation) as exc:
+ _encode_tile(im, fp, tile, bufsize, None, exc)
+ if hasattr(fp, "flush"):
+ fp.flush()
+
+
+def _encode_tile(im, fp, tile: list[_Tile], bufsize, fh, exc=None):
+ for encoder_name, extents, offset, args in tile:
+ if offset > 0:
+ fp.seek(offset)
+ encoder = Image._getencoder(im.mode, encoder_name, args, im.encoderconfig)
+ try:
+ encoder.setimage(im.im, extents)
+ if encoder.pushes_fd:
+ encoder.setfd(fp)
+ errcode = encoder.encode_to_pyfd()[1]
+ else:
+ if exc:
+ # compress to Python file-compatible object
+ while True:
+ errcode, data = encoder.encode(bufsize)[1:]
+ fp.write(data)
+ if errcode:
+ break
+ else:
+ # slight speedup: compress to real file object
+ errcode = encoder.encode_to_file(fh, bufsize)
+ if errcode < 0:
+ raise _get_oserror(errcode, encoder=True) from exc
+ finally:
+ encoder.cleanup()
+
+
+def _safe_read(fp, size):
+ """
+ Reads large blocks in a safe way. Unlike fp.read(n), this function
+ doesn't trust the user. If the requested size is larger than
+ SAFEBLOCK, the file is read block by block.
+
+ :param fp: File handle. Must implement a read method.
+ :param size: Number of bytes to read.
+ :returns: A string containing size bytes of data.
+
+ Raises an OSError if the file is truncated and the read cannot be completed
+
+ """
+ if size <= 0:
+ return b""
+ if size <= SAFEBLOCK:
+ data = fp.read(size)
+ if len(data) < size:
+ msg = "Truncated File Read"
+ raise OSError(msg)
+ return data
+ data = []
+ remaining_size = size
+ while remaining_size > 0:
+ block = fp.read(min(remaining_size, SAFEBLOCK))
+ if not block:
+ break
+ data.append(block)
+ remaining_size -= len(block)
+ if sum(len(d) for d in data) < size:
+ msg = "Truncated File Read"
+ raise OSError(msg)
+ return b"".join(data)
+
+
+class PyCodecState:
+ def __init__(self):
+ self.xsize = 0
+ self.ysize = 0
+ self.xoff = 0
+ self.yoff = 0
+
+ def extents(self):
+ return self.xoff, self.yoff, self.xoff + self.xsize, self.yoff + self.ysize
+
+
+class PyCodec:
+ def __init__(self, mode, *args):
+ self.im = None
+ self.state = PyCodecState()
+ self.fd = None
+ self.mode = mode
+ self.init(args)
+
+ def init(self, args):
+ """
+ Override to perform codec specific initialization
+
+ :param args: Array of args items from the tile entry
+ :returns: None
+ """
+ self.args = args
+
+ def cleanup(self):
+ """
+ Override to perform codec specific cleanup
+
+ :returns: None
+ """
+ pass
+
+ def setfd(self, fd):
+ """
+ Called from ImageFile to set the Python file-like object
+
+ :param fd: A Python file-like object
+ :returns: None
+ """
+ self.fd = fd
+
+ def setimage(self, im, extents=None):
+ """
+ Called from ImageFile to set the core output image for the codec
+
+ :param im: A core image object
+ :param extents: a 4 tuple of (x0, y0, x1, y1) defining the rectangle
+ for this tile
+ :returns: None
+ """
+
+        # mirrors the C implementation
+ self.im = im
+
+ if extents:
+ (x0, y0, x1, y1) = extents
+ else:
+ (x0, y0, x1, y1) = (0, 0, 0, 0)
+
+ if x0 == 0 and x1 == 0:
+ self.state.xsize, self.state.ysize = self.im.size
+ else:
+ self.state.xoff = x0
+ self.state.yoff = y0
+ self.state.xsize = x1 - x0
+ self.state.ysize = y1 - y0
+
+ if self.state.xsize <= 0 or self.state.ysize <= 0:
+ msg = "Size cannot be negative"
+ raise ValueError(msg)
+
+ if (
+ self.state.xsize + self.state.xoff > self.im.size[0]
+ or self.state.ysize + self.state.yoff > self.im.size[1]
+ ):
+ msg = "Tile cannot extend outside image"
+ raise ValueError(msg)
+
+
+class PyDecoder(PyCodec):
+ """
+ Python implementation of a format decoder. Override this class and
+ add the decoding logic in the :meth:`decode` method.
+
+ See :ref:`Writing Your Own File Codec in Python`
+ """
+
+ _pulls_fd = False
+
+ @property
+ def pulls_fd(self):
+ return self._pulls_fd
+
+ def decode(self, buffer):
+ """
+ Override to perform the decoding process.
+
+ :param buffer: A bytes object with the data to be decoded.
+ :returns: A tuple of ``(bytes consumed, errcode)``.
+ If finished with decoding return -1 for the bytes consumed.
+ Err codes are from :data:`.ImageFile.ERRORS`.
+ """
+ msg = "unavailable in base decoder"
+ raise NotImplementedError(msg)
+
+ def set_as_raw(self, data, rawmode=None):
+ """
+ Convenience method to set the internal image from a stream of raw data
+
+ :param data: Bytes to be set
+ :param rawmode: The rawmode to be used for the decoder.
+ If not specified, it will default to the mode of the image
+ :returns: None
+ """
+
+ if not rawmode:
+ rawmode = self.mode
+ d = Image._getdecoder(self.mode, "raw", rawmode)
+ d.setimage(self.im, self.state.extents())
+ s = d.decode(data)
+
+ if s[0] >= 0:
+ msg = "not enough image data"
+ raise ValueError(msg)
+ if s[1] != 0:
+ msg = "cannot decode image data"
+ raise ValueError(msg)
+
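+
+# A minimal sketch of a PyDecoder subclass (illustrative only; "DEMO" is a
+# hypothetical format whose payload is already raw pixel data):
+class _DemoRawDecoder(PyDecoder):
+    _pulls_fd = True  # read from self.fd directly instead of buffered chunks
+
+    def decode(self, buffer):
+        self.set_as_raw(self.fd.read())
+        return -1, 0  # finished, no error
+
+
+# Registration would look like: Image.register_decoder("DEMO", _DemoRawDecoder)
+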
+
+class PyEncoder(PyCodec):
+ """
+ Python implementation of a format encoder. Override this class and
+ add the decoding logic in the :meth:`encode` method.
+
+ See :ref:`Writing Your Own File Codec in Python`
+ """
+
+ _pushes_fd = False
+
+ @property
+ def pushes_fd(self):
+ return self._pushes_fd
+
+ def encode(self, bufsize):
+ """
+ Override to perform the encoding process.
+
+ :param bufsize: Buffer size.
+ :returns: A tuple of ``(bytes encoded, errcode, bytes)``.
+ If finished with encoding return 1 for the error code.
+ Err codes are from :data:`.ImageFile.ERRORS`.
+ """
+ msg = "unavailable in base encoder"
+ raise NotImplementedError(msg)
+
+ def encode_to_pyfd(self):
+ """
+ If ``pushes_fd`` is ``True``, then this method will be used,
+ and ``encode()`` will only be called once.
+
+ :returns: A tuple of ``(bytes consumed, errcode)``.
+ Err codes are from :data:`.ImageFile.ERRORS`.
+ """
+ if not self.pushes_fd:
+ return 0, -8 # bad configuration
+ bytes_consumed, errcode, data = self.encode(0)
+ if data:
+ self.fd.write(data)
+ return bytes_consumed, errcode
+
+ def encode_to_file(self, fh, bufsize):
+ """
+ :param fh: File handle.
+ :param bufsize: Buffer size.
+
+ :returns: If finished successfully, return 0.
+ Otherwise, return an error code. Err codes are from
+ :data:`.ImageFile.ERRORS`.
+ """
+ errcode = 0
+ while errcode == 0:
+ status, errcode, buf = self.encode(bufsize)
+ if status > 0:
+ fh.write(buf[status:])
+ return errcode
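+
+
+# A matching sketch of a PyEncoder subclass (illustrative only; the payload
+# packing is a placeholder, not a real format):
+class _DemoRawEncoder(PyEncoder):
+    _pushes_fd = True  # write through encode_to_pyfd()
+
+    def encode(self, bufsize):
+        data = b""  # format-specific packing of self.im would go here
+        return len(data), 1, data  # errcode 1 signals "finished"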
diff --git a/Lib/site-packages/PIL/ImageFilter.py b/Lib/site-packages/PIL/ImageFilter.py
new file mode 100644
index 0000000..035b83c
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageFilter.py
@@ -0,0 +1,568 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# standard filters
+#
+# History:
+# 1995-11-27 fl Created
+# 2002-06-08 fl Added rank and mode filters
+# 2003-09-15 fl Fixed rank calculation in rank filter; added expand call
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1995-2002 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import functools
+
+
+class Filter:
+ pass
+
+
+class MultibandFilter(Filter):
+ pass
+
+
+class BuiltinFilter(MultibandFilter):
+ def filter(self, image):
+ if image.mode == "P":
+ msg = "cannot filter palette images"
+ raise ValueError(msg)
+ return image.filter(*self.filterargs)
+
+
+class Kernel(BuiltinFilter):
+ """
+ Create a convolution kernel. The current version only
+ supports 3x3 and 5x5 integer and floating point kernels.
+
+ In the current version, kernels can only be applied to
+ "L" and "RGB" images.
+
+ :param size: Kernel size, given as (width, height). In the current
+ version, this must be (3,3) or (5,5).
+ :param kernel: A sequence containing kernel weights. The kernel will
+ be flipped vertically before being applied to the image.
+ :param scale: Scale factor. If given, the result for each pixel is
+ divided by this value. The default is the sum of the
+ kernel weights.
+ :param offset: Offset. If given, this value is added to the result,
+ after it has been divided by the scale factor.
+ """
+
+ name = "Kernel"
+
+ def __init__(self, size, kernel, scale=None, offset=0):
+ if scale is None:
+ # default scale is sum of kernel
+ scale = functools.reduce(lambda a, b: a + b, kernel)
+ if size[0] * size[1] != len(kernel):
+ msg = "not enough coefficients in kernel"
+ raise ValueError(msg)
+ self.filterargs = size, scale, offset, kernel
+
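+
+# A minimal usage sketch of Kernel (illustrative only): a 3x3 sharpening
+# convolution whose weights sum to 1, so the default scale leaves overall
+# brightness unchanged.
+def _kernel_demo(image):
+    sharpen = Kernel((3, 3), (0, -1, 0, -1, 5, -1, 0, -1, 0))
+    return image.filter(sharpen)
+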
+
+class RankFilter(Filter):
+ """
+ Create a rank filter. The rank filter sorts all pixels in
+ a window of the given size, and returns the ``rank``'th value.
+
+ :param size: The kernel size, in pixels.
+ :param rank: What pixel value to pick. Use 0 for a min filter,
+ ``size * size / 2`` for a median filter, ``size * size - 1``
+ for a max filter, etc.
+ """
+
+ name = "Rank"
+
+ def __init__(self, size, rank):
+ self.size = size
+ self.rank = rank
+
+ def filter(self, image):
+ if image.mode == "P":
+ msg = "cannot filter palette images"
+ raise ValueError(msg)
+ image = image.expand(self.size // 2, self.size // 2)
+ return image.rankfilter(self.size, self.rank)
+
+
+class MedianFilter(RankFilter):
+ """
+ Create a median filter. Picks the median pixel value in a window with the
+ given size.
+
+ :param size: The kernel size, in pixels.
+ """
+
+ name = "Median"
+
+ def __init__(self, size=3):
+ self.size = size
+ self.rank = size * size // 2
+
+
+class MinFilter(RankFilter):
+ """
+ Create a min filter. Picks the lowest pixel value in a window with the
+ given size.
+
+ :param size: The kernel size, in pixels.
+ """
+
+ name = "Min"
+
+ def __init__(self, size=3):
+ self.size = size
+ self.rank = 0
+
+
+class MaxFilter(RankFilter):
+ """
+ Create a max filter. Picks the largest pixel value in a window with the
+ given size.
+
+ :param size: The kernel size, in pixels.
+ """
+
+ name = "Max"
+
+ def __init__(self, size=3):
+ self.size = size
+ self.rank = size * size - 1
+
+
+class ModeFilter(Filter):
+ """
+ Create a mode filter. Picks the most frequent pixel value in a box with the
+ given size. Pixel values that occur only once or twice are ignored; if no
+ pixel value occurs more than twice, the original pixel value is preserved.
+
+ :param size: The kernel size, in pixels.
+ """
+
+ name = "Mode"
+
+ def __init__(self, size=3):
+ self.size = size
+
+ def filter(self, image):
+ return image.modefilter(self.size)
+
+
+class GaussianBlur(MultibandFilter):
+ """Blurs the image with a sequence of extended box filters, which
+    approximates a Gaussian kernel. For details on accuracy see
+    <https://www.mia.uni-saarland.de/Publications/gwosdek-ssvm11.pdf>
+
+ :param radius: Standard deviation of the Gaussian kernel. Either a sequence of two
+ numbers for x and y, or a single number for both.
+ """
+
+ name = "GaussianBlur"
+
+ def __init__(self, radius=2):
+ self.radius = radius
+
+ def filter(self, image):
+ xy = self.radius
+ if not isinstance(xy, (tuple, list)):
+ xy = (xy, xy)
+ if xy == (0, 0):
+ return image.copy()
+ return image.gaussian_blur(xy)
+
+
+class BoxBlur(MultibandFilter):
+ """Blurs the image by setting each pixel to the average value of the pixels
+ in a square box extending radius pixels in each direction.
+ Supports float radius of arbitrary size. Uses an optimized implementation
+ which runs in linear time relative to the size of the image
+ for any radius value.
+
+ :param radius: Size of the box in a direction. Either a sequence of two numbers for
+ x and y, or a single number for both.
+
+ Radius 0 does not blur, returns an identical image.
+ Radius 1 takes 1 pixel in each direction, i.e. 9 pixels in total.
+ """
+
+ name = "BoxBlur"
+
+ def __init__(self, radius):
+ xy = radius
+ if not isinstance(xy, (tuple, list)):
+ xy = (xy, xy)
+ if xy[0] < 0 or xy[1] < 0:
+ msg = "radius must be >= 0"
+ raise ValueError(msg)
+ self.radius = radius
+
+ def filter(self, image):
+ xy = self.radius
+ if not isinstance(xy, (tuple, list)):
+ xy = (xy, xy)
+ if xy == (0, 0):
+ return image.copy()
+ return image.box_blur(xy)
+
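+
+# A minimal usage sketch of the blur filters above (illustrative only):
+def _blur_demo(image):
+    soft = image.filter(GaussianBlur(radius=4))
+    boxed = image.filter(BoxBlur(radius=(2, 0)))  # horizontal-only box blur
+    return soft, boxed
+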
+
+class UnsharpMask(MultibandFilter):
+ """Unsharp mask filter.
+
+ See Wikipedia's entry on `digital unsharp masking`_ for an explanation of
+ the parameters.
+
+ :param radius: Blur radius
+ :param percent: Unsharp strength, in percent
+ :param threshold: Threshold controlling the minimum brightness change
+ that will be sharpened
+
+ .. _digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking
+
+ """
+
+ name = "UnsharpMask"
+
+ def __init__(self, radius=2, percent=150, threshold=3):
+ self.radius = radius
+ self.percent = percent
+ self.threshold = threshold
+
+ def filter(self, image):
+ return image.unsharp_mask(self.radius, self.percent, self.threshold)
+
+
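+ # A brief usage sketch (not part of the library), assuming an image ``im``:
+ #
+ #     from PIL import ImageFilter
+ #
+ #     # sharpen only edges whose brightness changes by at least 3 levels
+ #     crisp = im.filter(ImageFilter.UnsharpMask(radius=2, percent=150, threshold=3))
+
+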
+class BLUR(BuiltinFilter):
+ name = "Blur"
+ # fmt: off
+ filterargs = (5, 5), 16, 0, (
+ 1, 1, 1, 1, 1,
+ 1, 0, 0, 0, 1,
+ 1, 0, 0, 0, 1,
+ 1, 0, 0, 0, 1,
+ 1, 1, 1, 1, 1,
+ )
+ # fmt: on
+
+
+class CONTOUR(BuiltinFilter):
+ name = "Contour"
+ # fmt: off
+ filterargs = (3, 3), 1, 255, (
+ -1, -1, -1,
+ -1, 8, -1,
+ -1, -1, -1,
+ )
+ # fmt: on
+
+
+class DETAIL(BuiltinFilter):
+ name = "Detail"
+ # fmt: off
+ filterargs = (3, 3), 6, 0, (
+ 0, -1, 0,
+ -1, 10, -1,
+ 0, -1, 0,
+ )
+ # fmt: on
+
+
+class EDGE_ENHANCE(BuiltinFilter):
+ name = "Edge-enhance"
+ # fmt: off
+ filterargs = (3, 3), 2, 0, (
+ -1, -1, -1,
+ -1, 10, -1,
+ -1, -1, -1,
+ )
+ # fmt: on
+
+
+class EDGE_ENHANCE_MORE(BuiltinFilter):
+ name = "Edge-enhance More"
+ # fmt: off
+ filterargs = (3, 3), 1, 0, (
+ -1, -1, -1,
+ -1, 9, -1,
+ -1, -1, -1,
+ )
+ # fmt: on
+
+
+class EMBOSS(BuiltinFilter):
+ name = "Emboss"
+ # fmt: off
+ filterargs = (3, 3), 1, 128, (
+ -1, 0, 0,
+ 0, 1, 0,
+ 0, 0, 0,
+ )
+ # fmt: on
+
+
+class FIND_EDGES(BuiltinFilter):
+ name = "Find Edges"
+ # fmt: off
+ filterargs = (3, 3), 1, 0, (
+ -1, -1, -1,
+ -1, 8, -1,
+ -1, -1, -1,
+ )
+ # fmt: on
+
+
+class SHARPEN(BuiltinFilter):
+ name = "Sharpen"
+ # fmt: off
+ filterargs = (3, 3), 16, 0, (
+ -2, -2, -2,
+ -2, 32, -2,
+ -2, -2, -2,
+ )
+ # fmt: on
+
+
+class SMOOTH(BuiltinFilter):
+ name = "Smooth"
+ # fmt: off
+ filterargs = (3, 3), 13, 0, (
+ 1, 1, 1,
+ 1, 5, 1,
+ 1, 1, 1,
+ )
+ # fmt: on
+
+
+class SMOOTH_MORE(BuiltinFilter):
+ name = "Smooth More"
+ # fmt: off
+ filterargs = (5, 5), 100, 0, (
+ 1, 1, 1, 1, 1,
+ 1, 5, 5, 5, 1,
+ 1, 5, 44, 5, 1,
+ 1, 5, 5, 5, 1,
+ 1, 1, 1, 1, 1,
+ )
+ # fmt: on
+
+
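+ # The built-in filters above are convolution kernels: ``filterargs`` holds
+ # (size, scale, offset, kernel), where each output pixel is the kernel's
+ # weighted sum divided by ``scale``, plus ``offset``. A custom kernel can be
+ # built the same way (a sketch, assuming an "L" or "RGB" image ``im``):
+ #
+ #     from PIL import ImageFilter
+ #
+ #     # simple horizontal edge detector; the kernel sums to 0, so an
+ #     # explicit scale is required
+ #     edges = im.filter(ImageFilter.Kernel((3, 3), (-1, -1, -1, 0, 0, 0, 1, 1, 1), scale=1))
+
+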
+class Color3DLUT(MultibandFilter):
+ """Three-dimensional color lookup table.
+
+ Transforms 3-channel pixels using the values of the channels as coordinates
+ in the 3D lookup table, interpolating between the nearest elements.
+
+ This method allows you to apply almost any color transformation
+ in constant time by using pre-calculated decimated tables.
+
+ .. versionadded:: 5.2.0
+
+ :param size: Size of the table. One int or tuple of (int, int, int).
+ Minimal size in any dimension is 2, maximum is 65.
+ :param table: Flat lookup table. A list of ``channels * size**3``
+ float elements or a list of ``size**3`` channels-sized
+ tuples with floats. Channels change fastest, then the
+ first dimension, then the second, then the third.
+ A value of 0.0 corresponds to the lowest output value,
+ 1.0 to the highest.
+ :param channels: Number of channels in the table. Can be 3 or 4.
+ Default is 3.
+ :param target_mode: A mode for the result image. Must have at least
+ ``channels`` channels. Default is ``None``, which means
+ the mode is not changed.
+ """
+
+ name = "Color 3D LUT"
+
+ def __init__(self, size, table, channels=3, target_mode=None, **kwargs):
+ if channels not in (3, 4):
+ msg = "Only 3 or 4 output channels are supported"
+ raise ValueError(msg)
+ self.size = size = self._check_size(size)
+ self.channels = channels
+ self.mode = target_mode
+
+ # Hidden flag `_copy_table=False` could be used to avoid extra copying
+ # of the table if the table is specially made for the constructor.
+ copy_table = kwargs.get("_copy_table", True)
+ items = size[0] * size[1] * size[2]
+ wrong_size = False
+
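+ # Accept numpy arrays without importing numpy unconditionally: only
+ # table objects exposing a ``shape`` attribute trigger the import below.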
+ numpy = None
+ if hasattr(table, "shape"):
+ try:
+ import numpy
+ except ImportError:
+ pass
+
+ if numpy and isinstance(table, numpy.ndarray):
+ if copy_table:
+ table = table.copy()
+
+ if table.shape in [
+ (items * channels,),
+ (items, channels),
+ (size[2], size[1], size[0], channels),
+ ]:
+ table = table.reshape(items * channels)
+ else:
+ wrong_size = True
+
+ else:
+ if copy_table:
+ table = list(table)
+
+ # Convert to a flat list
+ if table and isinstance(table[0], (list, tuple)):
+ table, raw_table = [], table
+ for pixel in raw_table:
+ if len(pixel) != channels:
+ msg = (
+ "The elements of the table should "
+ f"have a length of {channels}."
+ )
+ raise ValueError(msg)
+ table.extend(pixel)
+
+ if wrong_size or len(table) != items * channels:
+ msg = (
+ "The table should have either channels * size**3 float items "
+ "or size**3 items of channels-sized tuples with floats. "
+ f"Table should be: {channels}x{size[0]}x{size[1]}x{size[2]}. "
+ f"Actual length: {len(table)}"
+ )
+ raise ValueError(msg)
+ self.table = table
+
+ @staticmethod
+ def _check_size(size):
+ try:
+ _, _, _ = size
+ except ValueError as e:
+ msg = "Size should be either an integer or a tuple of three integers."
+ raise ValueError(msg) from e
+ except TypeError:
+ size = (size, size, size)
+ size = [int(x) for x in size]
+ for size_1d in size:
+ if not 2 <= size_1d <= 65:
+ msg = "Size should be in [2, 65] range."
+ raise ValueError(msg)
+ return size
+
+ @classmethod
+ def generate(cls, size, callback, channels=3, target_mode=None):
+ """Generates new LUT using provided callback.
+
+ :param size: Size of the table. Passed to the constructor.
+ :param callback: A function with three parameters which correspond
+ to the three color channels. It will be called ``size**3``
+ times with values from 0.0 to 1.0 and should return
+ a tuple with ``channels`` elements.
+ :param channels: The number of channels the callback should return.
+ :param target_mode: Passed to the constructor of the resulting
+ lookup table.
+ """
+ size_1d, size_2d, size_3d = cls._check_size(size)
+ if channels not in (3, 4):
+ msg = "Only 3 or 4 output channels are supported"
+ raise ValueError(msg)
+
+ table = [0] * (size_1d * size_2d * size_3d * channels)
+ idx_out = 0
+ for b in range(size_3d):
+ for g in range(size_2d):
+ for r in range(size_1d):
+ table[idx_out : idx_out + channels] = callback(
+ r / (size_1d - 1), g / (size_2d - 1), b / (size_3d - 1)
+ )
+ idx_out += channels
+
+ return cls(
+ (size_1d, size_2d, size_3d),
+ table,
+ channels=channels,
+ target_mode=target_mode,
+ _copy_table=False,
+ )
+
+ def transform(self, callback, with_normals=False, channels=None, target_mode=None):
+ """Transforms the table values using provided callback and returns
+ a new LUT with altered values.
+
+ :param callback: A function which takes old lookup table values
+ and returns a new set of values. The number
+ of arguments the function should take is
+ ``self.channels``, or ``3 + self.channels``
+ if the ``with_normals`` flag is set.
+ It should return a tuple of ``self.channels``
+ elements, or of ``channels`` elements if that
+ argument is set.
+ :param with_normals: If true, ``callback`` will be called with
+ coordinates in the color cube as the first
+ three arguments. Otherwise, ``callback``
+ will be called only with actual color values.
+ :param channels: The number of channels in the resulting lookup table.
+ :param target_mode: Passed to the constructor of the resulting
+ lookup table.
+ """
+ if channels not in (None, 3, 4):
+ msg = "Only 3 or 4 output channels are supported"
+ raise ValueError(msg)
+ ch_in = self.channels
+ ch_out = channels or ch_in
+ size_1d, size_2d, size_3d = self.size
+
+ table = [0] * (size_1d * size_2d * size_3d * ch_out)
+ idx_in = 0
+ idx_out = 0
+ for b in range(size_3d):
+ for g in range(size_2d):
+ for r in range(size_1d):
+ values = self.table[idx_in : idx_in + ch_in]
+ if with_normals:
+ values = callback(
+ r / (size_1d - 1),
+ g / (size_2d - 1),
+ b / (size_3d - 1),
+ *values,
+ )
+ else:
+ values = callback(*values)
+ table[idx_out : idx_out + ch_out] = values
+ idx_in += ch_in
+ idx_out += ch_out
+
+ return type(self)(
+ self.size,
+ table,
+ channels=ch_out,
+ target_mode=target_mode or self.mode,
+ _copy_table=False,
+ )
+
+ def __repr__(self):
+ r = [
+ f"{self.__class__.__name__} from {self.table.__class__.__name__}",
+ "size={:d}x{:d}x{:d}".format(*self.size),
+ f"channels={self.channels:d}",
+ ]
+ if self.mode:
+ r.append(f"target_mode={self.mode}")
+ return "<{}>".format(" ".join(r))
+
+ def filter(self, image):
+ from . import Image
+
+ return image.color_lut_3d(
+ self.mode or image.mode,
+ Image.Resampling.BILINEAR,
+ self.channels,
+ self.size[0],
+ self.size[1],
+ self.size[2],
+ self.table,
+ )
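+
+
+ # A hedged example (not part of the library) of building a 3D LUT with
+ # ``Color3DLUT.generate``: the callback receives normalized (r, g, b)
+ # coordinates and returns the transformed channel values, here a simple
+ # red/blue channel swap. ``rgb_image`` is assumed to be an RGB image:
+ #
+ #     from PIL import ImageFilter
+ #
+ #     swap_lut = ImageFilter.Color3DLUT.generate(17, lambda r, g, b: (b, g, r))
+ #     swapped = rgb_image.filter(swap_lut)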
diff --git a/Lib/site-packages/PIL/ImageFont.py b/Lib/site-packages/PIL/ImageFont.py
new file mode 100644
index 0000000..8213d03
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageFont.py
@@ -0,0 +1,1264 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PIL raster font management
+#
+# History:
+# 1996-08-07 fl created (experimental)
+# 1997-08-25 fl minor adjustments to handle fonts from pilfont 0.3
+# 1999-02-06 fl rewrote most font management stuff in C
+# 1999-03-17 fl take pth files into account in load_path (from Richard Jones)
+# 2001-02-17 fl added freetype support
+# 2001-05-09 fl added TransposedFont wrapper class
+# 2002-03-04 fl make sure we have a "L" or "1" font
+# 2002-12-04 fl skip non-directory entries in the system path
+# 2003-04-29 fl add embedded default font
+# 2003-09-27 fl added support for truetype charmap encodings
+#
+# Todo:
+# Adapt to PILFONT2 format (16-bit fonts, compressed, single file)
+#
+# Copyright (c) 1997-2003 by Secret Labs AB
+# Copyright (c) 1996-2003 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+from __future__ import annotations
+
+import base64
+import os
+import sys
+import warnings
+from enum import IntEnum
+from io import BytesIO
+from pathlib import Path
+from typing import BinaryIO
+
+from . import Image
+from ._util import is_directory, is_path
+
+
+class Layout(IntEnum):
+ BASIC = 0
+ RAQM = 1
+
+
+MAX_STRING_LENGTH = 1_000_000
+
+
+try:
+ from . import _imagingft as core
+except ImportError as ex:
+ from ._util import DeferredError
+
+ core = DeferredError.new(ex)
+
+
+def _string_length_check(text):
+ if MAX_STRING_LENGTH is not None and len(text) > MAX_STRING_LENGTH:
+ msg = "too many characters in string"
+ raise ValueError(msg)
+
+
+# FIXME: add support for pilfont2 format (see FontFile.py)
+
+# --------------------------------------------------------------------
+# Font metrics format:
+# "PILfont" LF
+# fontdescriptor LF
+# (optional) key=value... LF
+# "DATA" LF
+# binary data: 256*10*2 bytes (dx, dy, dstbox, srcbox)
+#
+# To place a character, cut out srcbox and paste at dstbox,
+# relative to the character position. Then move the character
+# position according to dx, dy.
+# --------------------------------------------------------------------
+
+
+class ImageFont:
+ """PIL font wrapper"""
+
+ def _load_pilfont(self, filename):
+ with open(filename, "rb") as fp:
+ image = None
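+ # look for a bitmap companion file with the same stem whose mode
+ # is bilevel ("1") or grayscale ("L")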
+ for ext in (".png", ".gif", ".pbm"):
+ if image:
+ image.close()
+ try:
+ fullname = os.path.splitext(filename)[0] + ext
+ image = Image.open(fullname)
+ except Exception:
+ pass
+ else:
+ if image and image.mode in ("1", "L"):
+ break
+ else:
+ if image:
+ image.close()
+ msg = "cannot find glyph data file"
+ raise OSError(msg)
+
+ self.file = fullname
+
+ self._load_pilfont_data(fp, image)
+ image.close()
+
+ def _load_pilfont_data(self, file, image):
+ # read PILfont header
+ if file.readline() != b"PILfont\n":
+ msg = "Not a PILfont file"
+ raise SyntaxError(msg)
+ file.readline().split(b";")
+ self.info = [] # FIXME: should be a dictionary
+ while True:
+ s = file.readline()
+ if not s or s == b"DATA\n":
+ break
+ self.info.append(s)
+
+ # read PILfont metrics
+ data = file.read(256 * 20)
+
+ # check image
+ if image.mode not in ("1", "L"):
+ msg = "invalid font image mode"
+ raise TypeError(msg)
+
+ image.load()
+
+ self.font = Image.core.font(image.im, data)
+
+ def getmask(self, text, mode="", *args, **kwargs):
+ """
+ Create a bitmap for the text.
+
+ If the font uses antialiasing, the bitmap should have mode ``L`` and use a
+ maximum value of 255. Otherwise, it should have mode ``1``.
+
+ :param text: Text to render.
+ :param mode: Used by some graphics drivers to indicate what mode the
+ driver prefers; if empty, the renderer may return either
+ mode. Note that the mode is always a string, to simplify
+ C-level implementations.
+
+ .. versionadded:: 1.1.5
+
+ :return: An internal PIL storage memory instance as defined by the
+ :py:mod:`PIL.Image.core` interface module.
+ """
+ _string_length_check(text)
+ Image._decompression_bomb_check(self.font.getsize(text))
+ return self.font.getmask(text, mode)
+
+ def getbbox(self, text, *args, **kwargs):
+ """
+ Returns bounding box (in pixels) of given text.
+
+ .. versionadded:: 9.2.0
+
+ :param text: Text to render.
+ :param mode: Used by some graphics drivers to indicate what mode the
+ driver prefers; if empty, the renderer may return either
+ mode. Note that the mode is always a string, to simplify
+ C-level implementations.
+
+ :return: ``(left, top, right, bottom)`` bounding box
+ """
+ _string_length_check(text)
+ width, height = self.font.getsize(text)
+ return 0, 0, width, height
+
+ def getlength(self, text, *args, **kwargs):
+ """
+ Returns length (in pixels) of given text.
+ This is the amount by which following text should be offset.
+
+ .. versionadded:: 9.2.0
+ """
+ _string_length_check(text)
+ width, height = self.font.getsize(text)
+ return width
+
+
+##
+# Wrapper for FreeType fonts. Application code should use the
+# truetype factory function to create font objects.
+
+
+class FreeTypeFont:
+ """FreeType font wrapper (requires _imagingft service)"""
+
+ def __init__(
+ self,
+ font: bytes | str | Path | BinaryIO | None = None,
+ size: float = 10,
+ index: int = 0,
+ encoding: str = "",
+ layout_engine: Layout | None = None,
+ ) -> None:
+ # FIXME: use service provider instead
+
+ if size <= 0:
+ msg = "font size must be greater than 0"
+ raise ValueError(msg)
+
+ self.path = font
+ self.size = size
+ self.index = index
+ self.encoding = encoding
+
+ if layout_engine not in (Layout.BASIC, Layout.RAQM):
+ layout_engine = Layout.BASIC
+ if core.HAVE_RAQM:
+ layout_engine = Layout.RAQM
+ elif layout_engine == Layout.RAQM and not core.HAVE_RAQM:
+ warnings.warn(
+ "Raqm layout was requested, but Raqm is not available. "
+ "Falling back to basic layout."
+ )
+ layout_engine = Layout.BASIC
+
+ self.layout_engine = layout_engine
+
+ def load_from_bytes(f):
+ self.font_bytes = f.read()
+ self.font = core.getfont(
+ "", size, index, encoding, self.font_bytes, layout_engine
+ )
+
+ if is_path(font):
+ if isinstance(font, Path):
+ font = str(font)
+ if sys.platform == "win32":
+ font_bytes_path = font if isinstance(font, bytes) else font.encode()
+ try:
+ font_bytes_path.decode("ascii")
+ except UnicodeDecodeError:
+ # FreeType cannot load fonts with non-ASCII characters on Windows
+ # So load it into memory first
+ with open(font, "rb") as f:
+ load_from_bytes(f)
+ return
+ self.font = core.getfont(
+ font, size, index, encoding, layout_engine=layout_engine
+ )
+ else:
+ load_from_bytes(font)
+
+ def __getstate__(self):
+ return [self.path, self.size, self.index, self.encoding, self.layout_engine]
+
+ def __setstate__(self, state):
+ path, size, index, encoding, layout_engine = state
+ self.__init__(path, size, index, encoding, layout_engine)
+
+ def getname(self):
+ """
+ :return: A tuple of the font family (e.g. Helvetica) and the font style
+ (e.g. Bold)
+ """
+ return self.font.family, self.font.style
+
+ def getmetrics(self):
+ """
+ :return: A tuple of the font ascent (the distance from the baseline to
+ the highest outline point) and descent (the distance from the
+ baseline to the lowest outline point, a negative value)
+ """
+ return self.font.ascent, self.font.descent
+
+ def getlength(self, text, mode="", direction=None, features=None, language=None):
+ """
+ Returns length (in pixels with 1/64 precision) of given text when rendered
+ in font with provided direction, features, and language.
+
+ This is the amount by which following text should be offset.
+ Text bounding box may extend past the length in some fonts,
+ e.g. when using italics or accents.
+
+ The result is returned as a float; it is a whole number if using basic layout.
+
+ Note that the sum of two lengths may not equal the length of a concatenated
+ string due to kerning. If you need to adjust for kerning, include the following
+ character and subtract its length.
+
+ For example, instead of ::
+
+ hello = font.getlength("Hello")
+ world = font.getlength("World")
+ hello_world = hello + world # not adjusted for kerning
+ assert hello_world == font.getlength("HelloWorld") # may fail
+
+ use ::
+
+ hello = font.getlength("HelloW") - font.getlength("W") # adjusted for kerning
+ world = font.getlength("World")
+ hello_world = hello + world # adjusted for kerning
+ assert hello_world == font.getlength("HelloWorld") # True
+
+ or disable kerning with (requires libraqm) ::
+
+ hello = draw.textlength("Hello", font, features=["-kern"])
+ world = draw.textlength("World", font, features=["-kern"])
+ hello_world = hello + world # kerning is disabled, no need to adjust
+ assert hello_world == draw.textlength("HelloWorld", font, features=["-kern"])
+
+ .. versionadded:: 8.0.0
+
+ :param text: Text to measure.
+ :param mode: Used by some graphics drivers to indicate what mode the
+ driver prefers; if empty, the renderer may return either
+ mode. Note that the mode is always a string, to simplify
+ C-level implementations.
+
+ :param direction: Direction of the text. It can be 'rtl' (right to
+ left), 'ltr' (left to right) or 'ttb' (top to bottom).
+ Requires libraqm.
+
+ :param features: A list of OpenType font features to be used during text
+ layout. This is usually used to turn on optional
+ font features that are not enabled by default,
+ for example 'dlig' or 'ss01', but can be also
+ used to turn off default font features for
+ example '-liga' to disable ligatures or '-kern'
+ to disable kerning. To get all supported
+ features, see
+ https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist
+ Requires libraqm.
+
+ :param language: Language of the text. Different languages may use
+ different glyph shapes or ligatures. This parameter tells
+ the font which language the text is in, and to apply the
+ correct substitutions as appropriate, if available.
+ It should be a `BCP 47 language code
+ <https://www.w3.org/International/articles/language-tags/>`_
+ Requires libraqm.
+
+ :return: Either width for horizontal text, or height for vertical text.
+ """
+ _string_length_check(text)
+ return self.font.getlength(text, mode, direction, features, language) / 64
+
+ def getbbox(
+ self,
+ text,
+ mode="",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ anchor=None,
+ ):
+ """
+ Returns bounding box (in pixels) of given text relative to given anchor
+ when rendered in font with provided direction, features, and language.
+
+ Use :py:meth:`getlength()` to get the offset of following text with
+ 1/64 pixel precision. The bounding box includes extra margins for
+ some fonts, e.g. italics or accents.
+
+ .. versionadded:: 8.0.0
+
+ :param text: Text to render.
+ :param mode: Used by some graphics drivers to indicate what mode the
+ driver prefers; if empty, the renderer may return either
+ mode. Note that the mode is always a string, to simplify
+ C-level implementations.
+
+ :param direction: Direction of the text. It can be 'rtl' (right to
+ left), 'ltr' (left to right) or 'ttb' (top to bottom).
+ Requires libraqm.
+
+ :param features: A list of OpenType font features to be used during text
+ layout. This is usually used to turn on optional
+ font features that are not enabled by default,
+ for example 'dlig' or 'ss01', but can be also
+ used to turn off default font features for
+ example '-liga' to disable ligatures or '-kern'
+ to disable kerning. To get all supported
+ features, see
+ https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist
+ Requires libraqm.
+
+ :param language: Language of the text. Different languages may use
+ different glyph shapes or ligatures. This parameter tells
+ the font which language the text is in, and to apply the
+ correct substitutions as appropriate, if available.
+ It should be a `BCP 47 language code
+ <https://www.w3.org/International/articles/language-tags/>`_
+ Requires libraqm.
+
+ :param stroke_width: The width of the text stroke.
+
+ :param anchor: The text anchor alignment. Determines the relative location of
+ the anchor to the text. The default alignment is top left,
+ specifically ``la`` for horizontal text and ``lt`` for
+ vertical text. See :ref:`text-anchors` for details.
+
+ :return: ``(left, top, right, bottom)`` bounding box
+ """
+ _string_length_check(text)
+ size, offset = self.font.getsize(
+ text, mode, direction, features, language, anchor
+ )
+ left, top = offset[0] - stroke_width, offset[1] - stroke_width
+ width, height = size[0] + 2 * stroke_width, size[1] + 2 * stroke_width
+ return left, top, left + width, top + height
+
+ def getmask(
+ self,
+ text,
+ mode="",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ anchor=None,
+ ink=0,
+ start=None,
+ ):
+ """
+ Create a bitmap for the text.
+
+ If the font uses antialiasing, the bitmap should have mode ``L`` and use a
+ maximum value of 255. If the font has embedded color data, the bitmap
+ should have mode ``RGBA``. Otherwise, it should have mode ``1``.
+
+ :param text: Text to render.
+ :param mode: Used by some graphics drivers to indicate what mode the
+ driver prefers; if empty, the renderer may return either
+ mode. Note that the mode is always a string, to simplify
+ C-level implementations.
+
+ .. versionadded:: 1.1.5
+
+ :param direction: Direction of the text. It can be 'rtl' (right to
+ left), 'ltr' (left to right) or 'ttb' (top to bottom).
+ Requires libraqm.
+
+ .. versionadded:: 4.2.0
+
+ :param features: A list of OpenType font features to be used during text
+ layout. This is usually used to turn on optional
+ font features that are not enabled by default,
+ for example 'dlig' or 'ss01', but can be also
+ used to turn off default font features for
+ example '-liga' to disable ligatures or '-kern'
+ to disable kerning. To get all supported
+ features, see
+ https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist
+ Requires libraqm.
+
+ .. versionadded:: 4.2.0
+
+ :param language: Language of the text. Different languages may use
+ different glyph shapes or ligatures. This parameter tells
+ the font which language the text is in, and to apply the
+ correct substitutions as appropriate, if available.
+ It should be a `BCP 47 language code
+ <https://www.w3.org/International/articles/language-tags/>`_
+ Requires libraqm.
+
+ .. versionadded:: 6.0.0
+
+ :param stroke_width: The width of the text stroke.
+
+ .. versionadded:: 6.2.0
+
+ :param anchor: The text anchor alignment. Determines the relative location of
+ the anchor to the text. The default alignment is top left,
+ specifically ``la`` for horizontal text and ``lt`` for
+ vertical text. See :ref:`text-anchors` for details.
+
+ .. versionadded:: 8.0.0
+
+ :param ink: Foreground ink for rendering in RGBA mode.
+
+ .. versionadded:: 8.0.0
+
+ :param start: Tuple of horizontal and vertical offset, as text may render
+ differently when starting at fractional coordinates.
+
+ .. versionadded:: 9.4.0
+
+ :return: An internal PIL storage memory instance as defined by the
+ :py:mod:`PIL.Image.core` interface module.
+ """
+ return self.getmask2(
+ text,
+ mode,
+ direction=direction,
+ features=features,
+ language=language,
+ stroke_width=stroke_width,
+ anchor=anchor,
+ ink=ink,
+ start=start,
+ )[0]
+
+ def getmask2(
+ self,
+ text,
+ mode="",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ anchor=None,
+ ink=0,
+ start=None,
+ *args,
+ **kwargs,
+ ):
+ """
+ Create a bitmap for the text.
+
+ If the font uses antialiasing, the bitmap should have mode ``L`` and use a
+ maximum value of 255. If the font has embedded color data, the bitmap
+ should have mode ``RGBA``. Otherwise, it should have mode ``1``.
+
+ :param text: Text to render.
+ :param mode: Used by some graphics drivers to indicate what mode the
+ driver prefers; if empty, the renderer may return either
+ mode. Note that the mode is always a string, to simplify
+ C-level implementations.
+
+ .. versionadded:: 1.1.5
+
+ :param direction: Direction of the text. It can be 'rtl' (right to
+ left), 'ltr' (left to right) or 'ttb' (top to bottom).
+ Requires libraqm.
+
+ .. versionadded:: 4.2.0
+
+ :param features: A list of OpenType font features to be used during text
+ layout. This is usually used to turn on optional
+ font features that are not enabled by default,
+ for example 'dlig' or 'ss01', but can be also
+ used to turn off default font features for
+ example '-liga' to disable ligatures or '-kern'
+ to disable kerning. To get all supported
+ features, see
+ https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist
+ Requires libraqm.
+
+ .. versionadded:: 4.2.0
+
+ :param language: Language of the text. Different languages may use
+ different glyph shapes or ligatures. This parameter tells
+ the font which language the text is in, and to apply the
+ correct substitutions as appropriate, if available.
+ It should be a `BCP 47 language code
+ <https://www.w3.org/International/articles/language-tags/>`_
+ Requires libraqm.
+
+ .. versionadded:: 6.0.0
+
+ :param stroke_width: The width of the text stroke.
+
+ .. versionadded:: 6.2.0
+
+ :param anchor: The text anchor alignment. Determines the relative location of
+ the anchor to the text. The default alignment is top left,
+ specifically ``la`` for horizontal text and ``lt`` for
+ vertical text. See :ref:`text-anchors` for details.
+
+ .. versionadded:: 8.0.0
+
+ :param ink: Foreground ink for rendering in RGBA mode.
+
+ .. versionadded:: 8.0.0
+
+ :param start: Tuple of horizontal and vertical offset, as text may render
+ differently when starting at fractional coordinates.
+
+ .. versionadded:: 9.4.0
+
+ :return: A tuple of an internal PIL storage memory instance as defined by the
+ :py:mod:`PIL.Image.core` interface module, and the text offset, the
+ gap between the starting coordinate and the first marking
+ """
+ _string_length_check(text)
+ if start is None:
+ start = (0, 0)
+ im = None
+ size = None
+
+ def fill(width, height):
+ nonlocal im, size
+
+ size = (width, height)
+ if Image.MAX_IMAGE_PIXELS is not None:
+ pixels = max(1, width) * max(1, height)
+ if pixels > 2 * Image.MAX_IMAGE_PIXELS:
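+ # skip allocating an oversized mask; the decompression bomb
+ # check after rendering will raise for this size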
+ return
+
+ im = Image.core.fill("RGBA" if mode == "RGBA" else "L", size)
+ return im
+
+ offset = self.font.render(
+ text,
+ fill,
+ mode,
+ direction,
+ features,
+ language,
+ stroke_width,
+ anchor,
+ ink,
+ start[0],
+ start[1],
+ )
+ Image._decompression_bomb_check(size)
+ return im, offset
+
+ def font_variant(
+ self, font=None, size=None, index=None, encoding=None, layout_engine=None
+ ):
+ """
+ Create a copy of this FreeTypeFont object,
+ using any specified arguments to override the settings.
+
+ Parameters are identical to the parameters used to initialize this
+ object.
+
+ :return: A FreeTypeFont object.
+ """
+ if font is None:
+ try:
+ font = BytesIO(self.font_bytes)
+ except AttributeError:
+ font = self.path
+ return FreeTypeFont(
+ font=font,
+ size=self.size if size is None else size,
+ index=self.index if index is None else index,
+ encoding=self.encoding if encoding is None else encoding,
+ layout_engine=layout_engine or self.layout_engine,
+ )
+
+ def get_variation_names(self):
+ """
+ :returns: A list of the named styles in a variation font.
+ :exception OSError: If the font is not a variation font.
+ """
+ try:
+ names = self.font.getvarnames()
+ except AttributeError as e:
+ msg = "FreeType 2.9.1 or greater is required"
+ raise NotImplementedError(msg) from e
+ return [name.replace(b"\x00", b"") for name in names]
+
+ def set_variation_by_name(self, name):
+ """
+ :param name: The name of the style.
+ :exception OSError: If the font is not a variation font.
+ """
+ names = self.get_variation_names()
+ if not isinstance(name, bytes):
+ name = name.encode()
+ index = names.index(name) + 1
+
+ if index == getattr(self, "_last_variation_index", None):
+ # When the same name is set twice in a row,
+ # there is an 'unknown freetype error'
+ # https://savannah.nongnu.org/bugs/?56186
+ return
+ self._last_variation_index = index
+
+ self.font.setvarname(index)
+
+ def get_variation_axes(self):
+ """
+ :returns: A list of the axes in a variation font.
+ :exception OSError: If the font is not a variation font.
+ """
+ try:
+ axes = self.font.getvaraxes()
+ except AttributeError as e:
+ msg = "FreeType 2.9.1 or greater is required"
+ raise NotImplementedError(msg) from e
+ for axis in axes:
+ axis["name"] = axis["name"].replace(b"\x00", b"")
+ return axes
+
+ def set_variation_by_axes(self, axes):
+ """
+ :param axes: A list of values for each axis.
+ :exception OSError: If the font is not a variation font.
+ """
+ try:
+ self.font.setvaraxes(axes)
+ except AttributeError as e:
+ msg = "FreeType 2.9.1 or greater is required"
+ raise NotImplementedError(msg) from e
+
+
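+ # A hedged usage sketch (not part of the library; "MyVariable.ttf" is a
+ # hypothetical variation font file):
+ #
+ #     from PIL import ImageFont
+ #
+ #     font = ImageFont.truetype("MyVariable.ttf", size=24)
+ #     print(font.get_variation_names())   # e.g. [b'Light', b'Regular', b'Bold']
+ #     font.set_variation_by_name("Bold")  # or font.set_variation_by_axes([700])
+
+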
+class TransposedFont:
+ """Wrapper for writing rotated or mirrored text"""
+
+ def __init__(self, font, orientation=None):
+ """
+ Wrapper that creates a transposed font from any existing font
+ object.
+
+ :param font: A font object.
+ :param orientation: An optional orientation. If given, this should
+ be one of Image.Transpose.FLIP_LEFT_RIGHT, Image.Transpose.FLIP_TOP_BOTTOM,
+ Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_180, or
+ Image.Transpose.ROTATE_270.
+ """
+ self.font = font
+ self.orientation = orientation # any 'transpose' argument, or None
+
+ def getmask(self, text, mode="", *args, **kwargs):
+ im = self.font.getmask(text, mode, *args, **kwargs)
+ if self.orientation is not None:
+ return im.transpose(self.orientation)
+ return im
+
+ def getbbox(self, text, *args, **kwargs):
+ # TransposedFont doesn't support getmask2, move top-left point to (0, 0)
+ # this has no effect on ImageFont and simulates anchor="lt" for FreeTypeFont
+ left, top, right, bottom = self.font.getbbox(text, *args, **kwargs)
+ width = right - left
+ height = bottom - top
+ if self.orientation in (Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_270):
+ return 0, 0, height, width
+ return 0, 0, width, height
+
+ def getlength(self, text, *args, **kwargs):
+ if self.orientation in (Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_270):
+ msg = "text length is undefined for text rotated by 90 or 270 degrees"
+ raise ValueError(msg)
+ return self.font.getlength(text, *args, **kwargs)
+
+
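+ # A brief sketch (not part of the library): any font object can be wrapped
+ # for rotated or mirrored rendering. ``font`` and ``draw`` (an ImageDraw
+ # instance) are assumed to exist:
+ #
+ #     from PIL import Image, ImageFont
+ #
+ #     rotated = ImageFont.TransposedFont(font, Image.Transpose.ROTATE_90)
+ #     draw.text((10, 10), "sideways", font=rotated)
+
+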
+def load(filename):
+ """
+ Load a font file. This function loads a font object from the given
+ bitmap font file, and returns the corresponding font object.
+
+ :param filename: Name of font file.
+ :return: A font object.
+ :exception OSError: If the file could not be read.
+ """
+ f = ImageFont()
+ f._load_pilfont(filename)
+ return f
+
+
+def truetype(font=None, size=10, index=0, encoding="", layout_engine=None):
+ """
+ Load a TrueType or OpenType font from a file or file-like object,
+ and create a font object.
+ This function loads a font object from the given file or file-like
+ object, and creates a font object for a font of the given size.
+
+ Pillow uses FreeType to open font files. On Windows, be aware that FreeType
+ will keep the file open as long as the FreeTypeFont object exists. Windows
+ limits the number of files that can be open in C at once to 512, so if many
+ fonts are opened simultaneously and that limit is approached, an
+ ``OSError`` may be thrown, reporting that FreeType "cannot open resource".
+ A workaround would be to copy the file(s) into memory, and open that instead.
+
+ This function requires the _imagingft service.
+
+ :param font: A filename or file-like object containing a TrueType font.
+ If the file is not found at this filename, the loader may also
+ search in other directories, such as the :file:`fonts/`
+ directory on Windows or :file:`/Library/Fonts/`,
+ :file:`/System/Library/Fonts/` and :file:`~/Library/Fonts/` on
+ macOS.
+
+ :param size: The requested size, in pixels.
+ :param index: Which font face to load (default is first available face).
+ :param encoding: Which font encoding to use (default is Unicode). Possible
+ encodings include (see the FreeType documentation for more
+ information):
+
+ * "unic" (Unicode)
+ * "symb" (Microsoft Symbol)
+ * "ADOB" (Adobe Standard)
+ * "ADBE" (Adobe Expert)
+ * "ADBC" (Adobe Custom)
+ * "armn" (Apple Roman)
+ * "sjis" (Shift JIS)
+ * "gb " (PRC)
+ * "big5"
+ * "wans" (Extended Wansung)
+ * "joha" (Johab)
+ * "lat1" (Latin-1)
+
+ This specifies the character set to use. It does not alter the
+ encoding of any text provided in subsequent operations.
+ :param layout_engine: Which layout engine to use, if available:
+ :attr:`.ImageFont.Layout.BASIC` or :attr:`.ImageFont.Layout.RAQM`.
+ If it is available, Raqm layout will be used by default.
+ Otherwise, basic layout will be used.
+
+ Raqm layout is recommended for all non-English text. If Raqm layout
+ is not required, basic layout will have better performance.
+
+ You can check support for Raqm layout using
+ :py:func:`PIL.features.check_feature` with ``feature="raqm"``.
+
+ .. versionadded:: 4.2.0
+ :return: A font object.
+ :exception OSError: If the file could not be read.
+ :exception ValueError: If the font size is not greater than zero.
+ """
+
+ def freetype(font):
+ return FreeTypeFont(font, size, index, encoding, layout_engine)
+
+ try:
+ return freetype(font)
+ except OSError:
+ if not is_path(font):
+ raise
+ ttf_filename = os.path.basename(font)
+
+ dirs = []
+ if sys.platform == "win32":
+ # check the windows font repository
+ # NOTE: must use uppercase WINDIR, to work around bugs in
+ # 1.5.2's os.environ.get()
+ windir = os.environ.get("WINDIR")
+ if windir:
+ dirs.append(os.path.join(windir, "fonts"))
+ elif sys.platform in ("linux", "linux2"):
+ lindirs = os.environ.get("XDG_DATA_DIRS")
+ if not lindirs:
+ # According to the freedesktop spec, XDG_DATA_DIRS should
+ # default to /usr/share
+ lindirs = "/usr/share"
+ dirs += [os.path.join(lindir, "fonts") for lindir in lindirs.split(":")]
+ elif sys.platform == "darwin":
+ dirs += [
+ "/Library/Fonts",
+ "/System/Library/Fonts",
+ os.path.expanduser("~/Library/Fonts"),
+ ]
+
+ ext = os.path.splitext(ttf_filename)[1]
+ first_font_with_a_different_extension = None
+ for directory in dirs:
+ for walkroot, walkdir, walkfilenames in os.walk(directory):
+ for walkfilename in walkfilenames:
+ if ext and walkfilename == ttf_filename:
+ return freetype(os.path.join(walkroot, walkfilename))
+ elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename:
+ fontpath = os.path.join(walkroot, walkfilename)
+ if os.path.splitext(fontpath)[1] == ".ttf":
+ return freetype(fontpath)
+ if not ext and first_font_with_a_different_extension is None:
+ first_font_with_a_different_extension = fontpath
+ if first_font_with_a_different_extension:
+ return freetype(first_font_with_a_different_extension)
+ raise
+
+
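+ # A hedged usage sketch (not part of the library; the font file name is
+ # hypothetical and system-dependent):
+ #
+ #     from PIL import ImageFont
+ #
+ #     try:
+ #         font = ImageFont.truetype("DejaVuSans.ttf", size=24)
+ #     except OSError:
+ #         font = ImageFont.load_default()  # bundled fallback
+
+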
+def load_path(filename):
+ """
+ Load font file. Same as :py:func:`~PIL.ImageFont.load`, but searches for a
+ bitmap font along the Python path.
+
+ :param filename: Name of font file.
+ :return: A font object.
+ :exception OSError: If the file could not be read.
+ """
+ for directory in sys.path:
+ if is_directory(directory):
+ if not isinstance(filename, str):
+ filename = filename.decode("utf-8")
+ try:
+ return load(os.path.join(directory, filename))
+ except OSError:
+ pass
+ msg = "cannot find font file"
+ raise OSError(msg)
+
+
+def load_default(size=None):
+ """If FreeType support is available, load a version of Aileron Regular,
+ https://dotcolon.net/font/aileron, with a more limited character set.
+
+ Otherwise, load a "better than nothing" font.
+
+ .. versionadded:: 1.1.4
+
+ :param size: The font size of Aileron Regular.
+
+ .. versionadded:: 10.1.0
+
+ :return: A font object.
+ """
+ if core.__class__.__name__ == "module" or size is not None:
+ f = truetype(
+ BytesIO(
+ base64.b64decode(
+ b"""
+AAEAAAAPAIAAAwBwRkZUTYwDlUAAADFoAAAAHEdERUYAqADnAAAo8AAAACRHUE9ThhmITwAAKfgAA
+AduR1NVQnHxefoAACkUAAAA4k9TLzJovoHLAAABeAAAAGBjbWFw5lFQMQAAA6gAAAGqZ2FzcP//AA
+MAACjoAAAACGdseWYmRXoPAAAGQAAAHfhoZWFkE18ayQAAAPwAAAA2aGhlYQboArEAAAE0AAAAJGh
+tdHjjERZ8AAAB2AAAAdBsb2NhuOexrgAABVQAAADqbWF4cAC7AEYAAAFYAAAAIG5hbWUr+h5lAAAk
+OAAAA6Jwb3N0D3oPTQAAJ9wAAAEKAAEAAAABGhxJDqIhXw889QALA+gAAAAA0Bqf2QAAAADhCh2h/
+2r/LgOxAyAAAAAIAAIAAAAAAAAAAQAAA8r/GgAAA7j/av9qA7EAAQAAAAAAAAAAAAAAAAAAAHQAAQ
+AAAHQAQwAFAAAAAAACAAAAAQABAAAAQAAAAAAAAAADAfoBkAAFAAgCigJYAAAASwKKAlgAAAFeADI
+BPgAAAAAFAAAAAAAAAAAAAAcAAAAAAAAAAAAAAABVS1dOAEAAIPsCAwL/GgDIA8oA5iAAAJMAAAAA
+AhICsgAAACAAAwH0AAAAAAAAAU0AAADYAAAA8gA5AVMAVgJEAEYCRAA1AuQAKQKOAEAAsAArATsAZ
+AE7AB4CMABVAkQAUADc/+EBEgAgANwAJQEv//sCRAApAkQAggJEADwCRAAtAkQAIQJEADkCRAArAk
+QAMgJEACwCRAAxANwAJQDc/+ECRABnAkQAUAJEAEQB8wAjA1QANgJ/AB0CcwBkArsALwLFAGQCSwB
+kAjcAZALGAC8C2gBkAQgAZAIgADcCYQBkAj8AZANiAGQCzgBkAuEALwJWAGQC3QAvAmsAZAJJADQC
+ZAAiAqoAXgJuACADuAAaAnEAGQJFABMCTwAuATMAYgEv//sBJwAiAkQAUAH0ADIBLAApAhMAJAJjA
+EoCEQAeAmcAHgIlAB4BIgAVAmcAHgJRAEoA7gA+AOn/8wIKAEoA9wBGA1cASgJRAEoCSgAeAmMASg
+JnAB4BSgBKAcsAGAE5ABQCUABCAgIAAQMRAAEB4v/6AgEAAQHOABQBLwBAAPoAYAEvACECRABNA0Y
+AJAItAHgBKgAcAkQAUAEsAHQAygAgAi0AOQD3ADYA9wAWAaEANgGhABYCbAAlAYMAeAGDADkA6/9q
+AhsAFAIKABUB/QAVAAAAAwAAAAMAAAAcAAEAAAAAAKQAAwABAAAAHAAEAIgAAAAeABAAAwAOAH4Aq
+QCrALEAtAC3ALsgGSAdICYgOiBEISL7Av//AAAAIACpAKsAsAC0ALcAuyAYIBwgJiA5IEQhIvsB//
+//4/+5/7j/tP+y/7D/reBR4E/gR+A14CzfTwVxAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAEGAAABAAAAAAAAAAECAAAAAgAAAAAAAAAAAAAAAAAAAAEAAAMEBQYHCAkKCwwNDg8QERIT
+FBUWFxgZGhscHR4fICEiIyQlJicoKSorLC0uLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVGR0hJSktMT
+U5PUFFSU1RVVldYWVpbXF1eX2BhAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGQAAA
+AAAAAAYnFmAAAAAABlAAAAAAAAAAAAAAAAAAAAAAAAAAAAY2htAAAAAAAAAABrbGlqAAAAAHAAbm9
+ycwBnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmACYAJgAmAD4AUgCCAMoBCgFO
+AVwBcgGIAaYBvAHKAdYB6AH2AgwCIAJKAogCpgLWAw4DIgNkA5wDugPUA+gD/AQQBEYEogS8BPoFJ
+gVSBWoFgAWwBcoF1gX6BhQGJAZMBmgGiga0BuIHGgdUB2YHkAeiB8AH3AfyCAoIHAgqCDoITghcCG
+oIogjSCPoJKglYCXwJwgnqCgIKKApACl4Klgq8CtwLDAs8C1YLjAuyC9oL7gwMDCYMSAxgDKAMrAz
+qDQoNTA1mDYQNoA2uDcAN2g3oDfYODA4iDkoOXA5sDnoOnA7EDvwAAAAFAAAAAAH0ArwAAwAGAAkA
+DAAPAAAxESERAxMhExcRASELARETAfT6qv6syKr+jgFUqsiqArz9RAGLAP/+1P8B/v3VAP8BLP4CA
+P8AAgA5//IAuQKyAAMACwAANyMDMwIyFhQGIiY0oE4MZk84JCQ4JLQB/v3AJDgkJDgAAgBWAeUBPA
+LfAAMABwAAEyMnMxcjJzOmRgpagkYKWgHl+vr6AAAAAAIARgAAAf4CsgAbAB8AAAEHMxUjByM3Iwc
+jNyM1MzcjNTM3MwczNzMHMxUrAQczAZgdZXEvOi9bLzovWmYdZXEvOi9bLzovWp9bHlsBn4w429vb
+2ziMONvb29s4jAAAAAMANf+mAg4DDAAfACYALAAAJRQGBxUjNS4BJzMeARcRLgE0Njc1MxUeARcjJ
+icVHgEBFBYXNQ4BExU+ATU0Ag5xWDpgcgRcBz41Xl9oVTpVYwpcC1ttXP6cLTQuM5szOrVRZwlOTQ
+ZqVzZECAEAGlukZAlOTQdrUG8O7iNlAQgxNhDlCDj+8/YGOjReAAAAAAUAKf/yArsCvAAHAAsAFQA
+dACcAABIyFhQGIiY0EyMBMwQiBhUUFjI2NTQSMhYUBiImNDYiBhUUFjI2NTR5iFBQiFCVVwHAV/5c
+OiMjOiPmiFBQiFCxOiMjOiMCvFaSVlaS/ZoCsjIzMC80NC8w/uNWklZWkhozMC80NC8wAAAAAgBA/
+/ICbgLAACIALgAAARUjEQYjIiY1NDY3LgE1NDYzMhcVJiMiBhUUFhcWOwE1MxUFFBYzMjc1IyIHDg
+ECbmBcYYOOVkg7R4hsQjY4Q0RNRD4SLDxW/pJUXzksPCkUUk0BgUb+zBVUZ0BkDw5RO1huCkULQzp
+COAMBcHDHRz0J/AIHRQAAAAEAKwHlAIUC3wADAAATIycze0YKWgHl+gAAAAABAGT/sAEXAwwACQAA
+EzMGEBcjLgE0Nt06dXU6OUBAAwzG/jDGVePs4wAAAAEAHv+wANEDDAAJAAATMx4BFAYHIzYQHjo5Q
+EA5OnUDDFXj7ONVxgHQAAAAAQBVAFIB2wHbAA4AAAE3FwcXBycHJzcnNxcnMwEtmxOfcTJjYzJxnx
+ObCj4BKD07KYolmZkliik7PbMAAQBQAFUB9AIlAAsAAAEjFSM1IzUzNTMVMwH0tTq1tTq1AR/Kyjj
+OzgAAAAAB/+H/iACMAGQABAAANwcjNzOMWlFOXVrS3AAAAQAgAP8A8gE3AAMAABMjNTPy0tIA/zgA
+AQAl//IApQByAAcAADYyFhQGIiY0STgkJDgkciQ4JCQ4AAAAAf/7/+IBNALQAAMAABcjEzM5Pvs+H
+gLuAAAAAAIAKf/yAhsCwAADAAcAABIgECA2IBAgKQHy/g5gATL+zgLA/TJEAkYAAAAAAQCCAAABlg
+KyAAgAAAERIxEHNTc2MwGWVr6SIygCsv1OAldxW1sWAAEAPAAAAg4CwAAZAAA3IRUhNRM+ATU0JiM
+iDwEjNz4BMzIWFRQGB7kBUv4x+kI2QTt+EAFWAQp8aGVtSl5GRjEA/0RVLzlLmAoKa3FsUkNxXQAA
+AAEALf/yAhYCwAAqAAABHgEVFAYjIi8BMxceATMyNjU0KwE1MzI2NTQmIyIGDwEjNz4BMzIWFRQGA
+YxBSZJo2RUBVgEHV0JBUaQREUBUQzc5TQcBVgEKfGhfcEMBbxJbQl1x0AoKRkZHPn9GSD80QUVCCg
+pfbGBPOlgAAAACACEAAAIkArIACgAPAAAlIxUjNSE1ATMRMyMRBg8BAiRXVv6qAVZWV60dHLCurq4
+rAdn+QgFLMibzAAABADn/8gIZArIAHQAAATIWFRQGIyIvATMXFjMyNjU0JiMiByMTIRUhBzc2ATNv
+d5Fl1RQBVgIad0VSTkVhL1IwAYj+vh8rMAHHgGdtgcUKCoFXTU5bYgGRRvAuHQAAAAACACv/8gITA
+sAAFwAjAAABMhYVFAYjIhE0NjMyFh8BIycmIyIDNzYTMjY1NCYjIgYVFBYBLmp7imr0l3RZdAgBXA
+IYZ5wKJzU6QVNJSz5SUAHSgWltiQFGxcNlVQoKdv7sPiz+ZF1LTmJbU0lhAAAAAQAyAAACGgKyAAY
+AAAEVASMBITUCGv6oXAFL/oECsij9dgJsRgAAAAMALP/xAhgCwAAWACAALAAAAR4BFRQGIyImNTQ2
+Ny4BNTQ2MhYVFAYmIgYVFBYyNjU0AzI2NTQmIyIGFRQWAZQ5S5BmbIpPOjA7ecp5P2F8Q0J8RIVJS
+0pLTEtOAW0TXTxpZ2ZqPF0SE1A3VWVlVTdQ/UU0N0RENzT9/ko+Ok1NOj1LAAIAMf/yAhkCwAAXAC
+MAAAEyERQGIyImLwEzFxYzMhMHBiMiJjU0NhMyNjU0JiMiBhUUFgEl9Jd0WXQIAVwCGGecCic1SWp
+7imo+UlBAQVNJAsD+usXDZVUKCnYBFD4sgWltif5kW1NJYV1LTmIAAAACACX/8gClAiAABwAPAAAS
+MhYUBiImNBIyFhQGIiY0STgkJDgkJDgkJDgkAiAkOCQkOP52JDgkJDgAAAAC/+H/iAClAiAABwAMA
+AASMhYUBiImNBMHIzczSTgkJDgkaFpSTl4CICQ4JCQ4/mba5gAAAQBnAB4B+AH0AAYAAAENARUlNS
+UB+P6qAVb+bwGRAbCmpkbJRMkAAAIAUAC7AfQBuwADAAcAAAEhNSERITUhAfT+XAGk/lwBpAGDOP8
+AOAABAEQAHgHVAfQABgAAARUFNS0BNQHV/m8BVv6qAStEyUSmpkYAAAAAAgAj//IB1ALAABgAIAAA
+ATIWFRQHDgEHIz4BNz4BNTQmIyIGByM+ARIyFhQGIiY0AQRibmktIAJWBSEqNig+NTlHBFoDezQ4J
+CQ4JALAZ1BjaS03JS1DMD5LLDQ/SUVgcv2yJDgkJDgAAAAAAgA2/5gDFgKYADYAQgAAAQMGFRQzMj
+Y1NCYjIg4CFRQWMzI2NxcGIyImNTQ+AjMyFhUUBiMiJwcGIyImNTQ2MzIfATcHNzYmIyIGFRQzMjY
+Cej8EJjJJlnBAfGQ+oHtAhjUYg5OPx0h2k06Os3xRWQsVLjY5VHtdPBwJETcJDyUoOkZEJz8B0f74
+EQ8kZl6EkTFZjVOLlyknMVm1pmCiaTq4lX6CSCknTVRmmR8wPdYnQzxuSWVGAAIAHQAAAncCsgAHA
+AoAACUjByMTMxMjATMDAcj+UVz4dO5d/sjPZPT0ArL9TgE6ATQAAAADAGQAAAJMArIAEAAbACcAAA
+EeARUUBgcGKwERMzIXFhUUJRUzMjc2NTQnJiMTPgE1NCcmKwEVMzIBvkdHZkwiNt7LOSGq/oeFHBt
+hahIlSTM+cB8Yj5UWAW8QT0VYYgwFArIEF5Fv1eMED2NfDAL93AU+N24PBP0AAAAAAQAv//ICjwLA
+ABsAAAEyFh8BIycmIyIGFRQWMzI/ATMHDgEjIiY1NDYBdX+PCwFWAiKiaHx5ZaIiAlYBCpWBk6a0A
+sCAagoKpqN/gaOmCgplhcicn8sAAAIAZAAAAp8CsgAMABkAAAEeARUUBgcGKwERMzITPgE1NCYnJi
+sBETMyAY59lJp8IzXN0jUVWmdjWRs5d3I4Aq4QqJWUug8EArL9mQ+PeHGHDgX92gAAAAABAGQAAAI
+vArIACwAAJRUhESEVIRUhFSEVAi/+NQHB/pUBTf6zRkYCskbwRvAAAAABAGQAAAIlArIACQAAExUh
+FSERIxEhFboBQ/69VgHBAmzwRv7KArJGAAAAAAEAL//yAo8CwAAfAAABMxEjNQcGIyImNTQ2MzIWH
+wEjJyYjIgYVFBYzMjY1IwGP90wfPnWTprSSf48LAVYCIqJofHllVG+hAU3+s3hARsicn8uAagoKpq
+N/gaN1XAAAAAEAZAAAAowCsgALAAABESMRIREjETMRIRECjFb+hFZWAXwCsv1OAS7+0gKy/sQBPAA
+AAAABAGQAAAC6ArIAAwAAMyMRM7pWVgKyAAABADf/8gHoArIAEwAAAREUBw4BIyImLwEzFxYzMjc2
+NREB6AIFcGpgbQIBVgIHfXQKAQKy/lYxIltob2EpKYyEFD0BpwAAAAABAGQAAAJ0ArIACwAACQEjA
+wcVIxEzEQEzATsBJ3ntQlZWAVVlAWH+nwEnR+ACsv6RAW8AAQBkAAACLwKyAAUAACUVIREzEQIv/j
+VWRkYCsv2UAAABAGQAAAMUArIAFAAAAREjETQ3BgcDIwMmJxYVESMRMxsBAxRWAiMxemx8NxsCVo7
+MywKy/U4BY7ZLco7+nAFmoFxLtP6dArL9lwJpAAAAAAEAZAAAAoACsgANAAAhIwEWFREjETMBJjUR
+MwKAhP67A1aEAUUDVAJeeov+pwKy/aJ5jAFZAAAAAgAv//ICuwLAAAkAEwAAEiAWFRQGICY1NBIyN
+jU0JiIGFRTbATSsrP7MrNrYenrYegLAxaKhxsahov47nIeIm5uIhwACAGQAAAJHArIADgAYAAABHg
+EVFAYHBisBESMRMzITNjQnJisBETMyAZRUX2VOHzuAVtY7GlxcGDWIiDUCrgtnVlVpCgT+5gKy/rU
+V1BUF/vgAAAACAC//zAK9AsAAEgAcAAAlFhcHJiMiBwYjIiY1NDYgFhUUJRQWMjY1NCYiBgI9PUMx
+UDcfKh8omqysATSs/dR62Hp62HpICTg7NgkHxqGixcWitbWHnJyHiJubAAIAZAAAAlgCsgAXACMAA
+CUWFyMmJyYnJisBESMRMzIXHgEVFAYHFiUzMjc+ATU0JyYrAQIqDCJfGQwNWhAhglbiOx9QXEY1Tv
+6bhDATMj1lGSyMtYgtOXR0BwH+1wKyBApbU0BSESRAAgVAOGoQBAABADT/8gIoAsAAJQAAATIWFyM
+uASMiBhUUFhceARUUBiMiJiczHgEzMjY1NCYnLgE1NDYBOmd2ClwGS0E6SUNRdW+HZnKKC1wPWkQ9
+Uk1cZGuEAsBwXUJHNjQ3OhIbZVZZbm5kREo+NT5DFRdYUFdrAAAAAAEAIgAAAmQCsgAHAAABIxEjE
+SM1IQJk9lb2AkICbP2UAmxGAAEAXv/yAmQCsgAXAAABERQHDgEiJicmNREzERQXHgEyNjc2NRECZA
+IIgfCBCAJWAgZYmlgGAgKy/k0qFFxzc1wUKgGz/lUrEkRQUEQSKwGrAAAAAAEAIAAAAnoCsgAGAAA
+hIwMzGwEzAYJ07l3N1FwCsv2PAnEAAAEAGgAAA7ECsgAMAAABAyMLASMDMxsBMxsBA7HAcZyicrZi
+kaB0nJkCsv1OAlP9rQKy/ZsCW/2kAmYAAAEAGQAAAm8CsgALAAAhCwEjEwMzGwEzAxMCCsrEY/bkY
+re+Y/D6AST+3AFcAVb+5gEa/q3+oQAAAQATAAACUQKyAAgAAAERIxEDMxsBMwFdVvRjwLphARD+8A
+EQAaL+sQFPAAABAC4AAAI5ArIACQAAJRUhNQEhNSEVAQI5/fUBof57Aen+YUZGQgIqRkX92QAAAAA
+BAGL/sAEFAwwABwAAARUjETMVIxEBBWlpowMMOP0UOANcAAAB//v/4gE0AtAAAwAABSMDMwE0Pvs+
+HgLuAAAAAQAi/7AAxQMMAAcAABcjNTMRIzUzxaNpaaNQOALsOAABAFAA1wH0AmgABgAAJQsBIxMzE
+wGwjY1GsESw1wFZ/qcBkf5vAAAAAQAy/6oBwv/iAAMAAAUhNSEBwv5wAZBWOAAAAAEAKQJEALYCsg
+ADAAATIycztjhVUAJEbgAAAAACACT/8gHQAiAAHQAlAAAhJwcGIyImNTQ2OwE1NCcmIyIHIz4BMzI
+XFh0BFBcnMjY9ASYVFAF6CR0wVUtgkJoiAgdgaQlaBm1Zrg4DCuQ9R+5MOSFQR1tbDiwUUXBUXowf
+J8c9SjRORzYSgVwAAAAAAgBK//ICRQLfABEAHgAAATIWFRQGIyImLwEVIxEzETc2EzI2NTQmIyIGH
+QEUFgFUcYCVbiNJEyNWVigySElcU01JXmECIJd4i5QTEDRJAt/+3jkq/hRuZV55ZWsdX14AAQAe//
+IB9wIgABgAAAEyFhcjJiMiBhUUFjMyNjczDgEjIiY1NDYBF152DFocbEJXU0A1Rw1aE3pbaoKQAiB
+oWH5qZm1tPDlaXYuLgZcAAAACAB7/8gIZAt8AEQAeAAABESM1BwYjIiY1NDYzMhYfAREDMjY9ATQm
+IyIGFRQWAhlWKDJacYCVbiNJEyOnSV5hQUlcUwLf/SFVOSqXeIuUExA0ARb9VWVrHV9ebmVeeQACA
+B7/8gH9AiAAFQAbAAABFAchHgEzMjY3Mw4BIyImNTQ2MzIWJyIGByEmAf0C/oAGUkA1SwlaD4FXbI
+WObmt45UBVBwEqDQEYFhNjWD84W16Oh3+akU9aU60AAAEAFQAAARoC8gAWAAATBh0BMxUjESMRIzU
+zNTQ3PgEzMhcVJqcDbW1WOTkDB0k8Hx5oAngVITRC/jQBzEIsJRs5PwVHEwAAAAIAHv8uAhkCIAAi
+AC8AAAERFAcOASMiLwEzFx4BMzI2NzY9AQcGIyImNTQ2MzIWHwE1AzI2PQE0JiMiBhUUFgIZAQSEd
+NwRAVcBBU5DTlUDASgyWnGAlW4jSRMjp0leYUFJXFMCEv5wSh1zeq8KCTI8VU0ZIQk5Kpd4i5QTED
+RJ/iJlax1fXm5lXnkAAQBKAAACCgLkABcAAAEWFREjETQnLgEHDgEdASMRMxE3NjMyFgIIAlYCBDs
+6RVRWViE5UVViAYUbQP7WASQxGzI7AQJyf+kC5P7TPSxUAAACAD4AAACsAsAABwALAAASMhYUBiIm
+NBMjETNeLiAgLiBiVlYCwCAuICAu/WACEgAC//P/LgCnAsAABwAVAAASMhYUBiImNBcRFAcGIyInN
+RY3NjURWS4gIC4gYgMLcRwNSgYCAsAgLiAgLo79wCUbZAJGBzMOHgJEAAAAAQBKAAACCALfAAsAAC
+EnBxUjETMREzMHEwGTwTJWVvdu9/rgN6kC3/4oAQv6/ugAAQBG//wA3gLfAA8AABMRFBceATcVBiM
+iJicmNRGcAQIcIxkkKi4CAQLf/bkhERoSBD4EJC8SNAJKAAAAAQBKAAADEAIgACQAAAEWFREjETQn
+JiMiFREjETQnJiMiFREjETMVNzYzMhYXNzYzMhYDCwVWBAxedFYEDF50VlYiJko7ThAvJkpEVAGfI
+jn+vAEcQyRZ1v76ARxDJFnW/voCEk08HzYtRB9HAAAAAAEASgAAAgoCIAAWAAABFhURIxE0JyYjIg
+YdASMRMxU3NjMyFgIIAlYCCXBEVVZWITlRVWIBhRtA/tYBJDEbbHR/6QISWz0sVAAAAAACAB7/8gI
+sAiAABwARAAASIBYUBiAmNBIyNjU0JiIGFRSlAQCHh/8Ah7ieWlqeWgIgn/Cfn/D+s3ZfYHV1YF8A
+AgBK/zwCRQIgABEAHgAAATIWFRQGIyImLwERIxEzFTc2EzI2NTQmIyIGHQEUFgFUcYCVbiNJEyNWV
+igySElcU01JXmECIJd4i5QTEDT+8wLWVTkq/hRuZV55ZWsdX14AAgAe/zwCGQIgABEAHgAAAREjEQ
+cGIyImNTQ2MzIWHwE1AzI2PQE0JiMiBhUUFgIZVigyWnGAlW4jSRMjp0leYUFJXFMCEv0qARk5Kpd
+4i5QTEDRJ/iJlax1fXm5lXnkAAQBKAAABPgIeAA0AAAEyFxUmBhURIxEzFTc2ARoWDkdXVlYwIwIe
+B0EFVlf+0gISU0cYAAEAGP/yAa0CIAAjAAATMhYXIyYjIgYVFBYXHgEVFAYjIiYnMxYzMjY1NCYnL
+gE1NDbkV2MJWhNdKy04PF1XbVhWbgxaE2ktOjlEUllkAiBaS2MrJCUoEBlPQkhOVFZoKCUmLhIWSE
+BIUwAAAAEAFP/4ARQCiQAXAAATERQXHgE3FQYjIiYnJjURIzUzNTMVMxWxAQMmMx8qMjMEAUdHVmM
+BzP7PGw4mFgY/BSwxDjQBNUJ7e0IAAAABAEL/8gICAhIAFwAAAREjNQcGIyImJyY1ETMRFBceATMy
+Nj0BAgJWITlRT2EKBVYEBkA1RFECEv3uWj4qTToiOQE+/tIlJC43c4DpAAAAAAEAAQAAAfwCEgAGA
+AABAyMDMxsBAfzJaclfop8CEv3uAhL+LQHTAAABAAEAAAMLAhIADAAAAQMjCwEjAzMbATMbAQMLqW
+Z2dmapY3t0a3Z7AhL97gG+/kICEv5AAcD+QwG9AAAB//oAAAHWAhIACwAAARMjJwcjEwMzFzczARq
+8ZIuKY763ZoWFYwEO/vLV1QEMAQbNzQAAAQAB/y4B+wISABEAAAEDDgEjIic1FjMyNj8BAzMbAQH7
+2iFZQB8NDRIpNhQH02GenQIS/cFVUAJGASozEwIt/i4B0gABABQAAAGxAg4ACQAAJRUhNQEhNSEVA
+QGx/mMBNP7iAYL+zkREQgGIREX+ewAAAAABAED/sAEOAwwALAAAASMiBhUUFxYVFAYHHgEVFAcGFR
+QWOwEVIyImNTQ3NjU0JzU2NTQnJjU0NjsBAQ4MKiMLDS4pKS4NCyMqDAtERAwLUlILDERECwLUGBk
+WTlsgKzUFBTcrIFtOFhkYOC87GFVMIkUIOAhFIkxVGDsvAAAAAAEAYP84AJoDIAADAAAXIxEzmjo6
+yAPoAAEAIf+wAO8DDAAsAAATFQYVFBcWFRQGKwE1MzI2NTQnJjU0NjcuATU0NzY1NCYrATUzMhYVF
+AcGFRTvUgsMREQLDCojCw0uKSkuDQsjKgwLREQMCwF6OAhFIkxVGDsvOBgZFk5bICs1BQU3KyBbTh
+YZGDgvOxhVTCJFAAABAE0A3wH2AWQAEwAAATMUIyImJyYjIhUjNDMyFhcWMzIBvjhuGywtQR0xOG4
+bLC1BHTEBZIURGCNMhREYIwAAAwAk/94DIgLoAAcAEQApAAAAIBYQBiAmECQgBhUUFiA2NTQlMhYX
+IyYjIgYUFjMyNjczDgEjIiY1NDYBAQFE3d3+vN0CB/7wubkBELn+xVBnD1wSWDo+QTcqOQZcEmZWX
+HN2Aujg/rbg4AFKpr+Mjb6+jYxbWEldV5ZZNShLVn5na34AAgB4AFIB9AGeAAUACwAAAQcXIyc3Mw
+cXIyc3AUqJiUmJifOJiUmJiQGepqampqampqYAAAIAHAHSAQ4CwAAHAA8AABIyFhQGIiY0NiIGFBY
+yNjRgakREakSTNCEhNCECwEJqQkJqCiM4IyM4AAAAAAIAUAAAAfQCCwALAA8AAAEzFSMVIzUjNTM1
+MxMhNSEBP7W1OrW1OrX+XAGkAVs4tLQ4sP31OAAAAQB0AkQBAQKyAAMAABMjNzOsOD1QAkRuAAAAA
+AEAIADsAKoBdgAHAAASMhYUBiImNEg6KCg6KAF2KDooKDoAAAIAOQBSAbUBngAFAAsAACUHIzcnMw
+UHIzcnMwELiUmJiUkBM4lJiYlJ+KampqampqYAAAABADYB5QDhAt8ABAAAEzczByM2Xk1OXQHv8Po
+AAQAWAeUAwQLfAAQAABMHIzczwV5NTl0C1fD6AAIANgHlAYsC3wAEAAkAABM3MwcjPwEzByM2Xk1O
+XapeTU5dAe/w+grw+gAAAgAWAeUBawLfAAQACQAAEwcjNzMXByM3M8FeTU5dql5NTl0C1fD6CvD6A
+AADACX/8gI1AHIABwAPABcAADYyFhQGIiY0NjIWFAYiJjQ2MhYUBiImNEk4JCQ4JOw4JCQ4JOw4JC
+Q4JHIkOCQkOCQkOCQkOCQkOCQkOAAAAAEAeABSAUoBngAFAAABBxcjJzcBSomJSYmJAZ6mpqamAAA
+AAAEAOQBSAQsBngAFAAAlByM3JzMBC4lJiYlJ+KampgAAAf9qAAABgQKyAAMAACsBATM/VwHAVwKy
+AAAAAAIAFAHIAdwClAAHABQAABMVIxUjNSM1BRUjNwcjJxcjNTMXN9pKMkoByDICKzQqATJLKysCl
+CmjoykBy46KiY3Lm5sAAQAVAAABvALyABgAAAERIxEjESMRIzUzNTQ3NjMyFxUmBgcGHQEBvFbCVj
+k5AxHHHx5iVgcDAg798gHM/jQBzEIOJRuWBUcIJDAVIRYAAAABABX//AHkAvIAJQAAJR4BNxUGIyI
+mJyY1ESYjIgcGHQEzFSMRIxEjNTM1NDc2MzIXERQBowIcIxkkKi4CAR4nXgwDbW1WLy8DEbNdOmYa
+EQQ/BCQvEjQCFQZWFSEWQv40AcxCDiUblhP9uSEAAAAAAAAWAQ4AAQAAAAAAAAATACgAAQAAAAAAA
+QAHAEwAAQAAAAAAAgAHAGQAAQAAAAAAAwAaAKIAAQAAAAAABAAHAM0AAQAAAAAABQA8AU8AAQAAAA
+AABgAPAawAAQAAAAAACAALAdQAAQAAAAAACQALAfgAAQAAAAAACwAXAjQAAQAAAAAADAAXAnwAAwA
+BBAkAAAAmAAAAAwABBAkAAQAOADwAAwABBAkAAgAOAFQAAwABBAkAAwA0AGwAAwABBAkABAAOAL0A
+AwABBAkABQB4ANUAAwABBAkABgAeAYwAAwABBAkACAAWAbwAAwABBAkACQAWAeAAAwABBAkACwAuA
+gQAAwABBAkADAAuAkwATgBvACAAUgBpAGcAaAB0AHMAIABSAGUAcwBlAHIAdgBlAGQALgAATm8gUm
+lnaHRzIFJlc2VydmVkLgAAQQBpAGwAZQByAG8AbgAAQWlsZXJvbgAAUgBlAGcAdQBsAGEAcgAAUmV
+ndWxhcgAAMQAuADEAMAAyADsAVQBLAFcATgA7AEEAaQBsAGUAcgBvAG4ALQBSAGUAZwB1AGwAYQBy
+AAAxLjEwMjtVS1dOO0FpbGVyb24tUmVndWxhcgAAQQBpAGwAZQByAG8AbgAAQWlsZXJvbgAAVgBlA
+HIAcwBpAG8AbgAgADEALgAxADAAMgA7AFAAUwAgADAAMAAxAC4AMQAwADIAOwBoAG8AdABjAG8Abg
+B2ACAAMQAuADAALgA3ADAAOwBtAGEAawBlAG8AdABmAC4AbABpAGIAMgAuADUALgA1ADgAMwAyADk
+AAFZlcnNpb24gMS4xMDI7UFMgMDAxLjEwMjtob3Rjb252IDEuMC43MDttYWtlb3RmLmxpYjIuNS41
+ODMyOQAAQQBpAGwAZQByAG8AbgAtAFIAZQBnAHUAbABhAHIAAEFpbGVyb24tUmVndWxhcgAAUwBvA
+HIAYQAgAFMAYQBnAGEAbgBvAABTb3JhIFNhZ2FubwAAUwBvAHIAYQAgAFMAYQBnAGEAbgBvAABTb3
+JhIFNhZ2FubwAAaAB0AHQAcAA6AC8ALwB3AHcAdwAuAGQAbwB0AGMAbwBsAG8AbgAuAG4AZQB0AAB
+odHRwOi8vd3d3LmRvdGNvbG9uLm5ldAAAaAB0AHQAcAA6AC8ALwB3AHcAdwAuAGQAbwB0AGMAbwBs
+AG8AbgAuAG4AZQB0AABodHRwOi8vd3d3LmRvdGNvbG9uLm5ldAAAAAACAAAAAAAA/4MAMgAAAAAAA
+AAAAAAAAAAAAAAAAAAAAHQAAAABAAIAAwAEAAUABgAHAAgACQAKAAsADAANAA4ADwAQABEAEgATAB
+QAFQAWABcAGAAZABoAGwAcAB0AHgAfACAAIQAiACMAJAAlACYAJwAoACkAKgArACwALQAuAC8AMAA
+xADIAMwA0ADUANgA3ADgAOQA6ADsAPAA9AD4APwBAAEEAQgBDAEQARQBGAEcASABJAEoASwBMAE0A
+TgBPAFAAUQBSAFMAVABVAFYAVwBYAFkAWgBbAFwAXQBeAF8AYABhAIsAqQCDAJMAjQDDAKoAtgC3A
+LQAtQCrAL4AvwC8AIwAwADBAAAAAAAB//8AAgABAAAADAAAABwAAAACAAIAAwBxAAEAcgBzAAIABA
+AAAAIAAAABAAAACgBMAGYAAkRGTFQADmxhdG4AGgAEAAAAAP//AAEAAAAWAANDQVQgAB5NT0wgABZ
+ST00gABYAAP//AAEAAAAA//8AAgAAAAEAAmxpZ2EADmxvY2wAFAAAAAEAAQAAAAEAAAACAAYAEAAG
+AAAAAgASADQABAAAAAEATAADAAAAAgAQABYAAQAcAAAAAQABAE8AAQABAGcAAQABAE8AAwAAAAIAE
+AAWAAEAHAAAAAEAAQAvAAEAAQBnAAEAAQAvAAEAGgABAAgAAgAGAAwAcwACAE8AcgACAEwAAQABAE
+kAAAABAAAACgBGAGAAAkRGTFQADmxhdG4AHAAEAAAAAP//AAIAAAABABYAA0NBVCAAFk1PTCAAFlJ
+PTSAAFgAA//8AAgAAAAEAAmNwc3AADmtlcm4AFAAAAAEAAAAAAAEAAQACAAYADgABAAAAAQASAAIA
+AAACAB4ANgABAAoABQAFAAoAAgABACQAPQAAAAEAEgAEAAAAAQAMAAEAOP/nAAEAAQAkAAIGigAEA
+AAFJAXKABoAGQAA//gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAD/sv+4/+z/7v/MAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAD/xAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/9T/6AAAAAD/8QAA
+ABD/vQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/7gAAAAAAAAAAAAAAAAAA//MAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABIAAAAAAAAAAP/5AAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/gAAD/4AAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//L/9AAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAA/+gAAAAAAAkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/zAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/mAAAAAAAAAAAAAAAAAAD
+/4gAA//AAAAAA//YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/+AAAAAAAAP/OAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/zv/qAAAAAP/0AAAACAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/ZAAD/egAA/1kAAAAA/5D/rgAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAD/9AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAD/8AAA/7b/8P+wAAD/8P/E/98AAAAA/8P/+P/0//oAAAAAAAAAAAAA//gA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/+AAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/w//C/9MAAP/SAAD/9wAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAD/yAAA/+kAAAAA//QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/9wAAAAD//QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAP/2AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAP/cAAAAAAAAAAAAAAAA/7YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAP/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/6AAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAkAFAAEAAAAAQACwAAABcA
+BgAAAAAAAAAIAA4AAAAAAAsAEgAAAAAAAAATABkAAwANAAAAAQAJAAAAAAAAAAAAAAAAAAAAGAAAA
+AAABwAAAAAAAAAAAAAAFQAFAAAAAAAYABgAAAAUAAAACgAAAAwAAgAPABEAFgAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFAAEAEQBdAAYAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAcAAAAAAAAABwAAAAAACAAAAAAAAAAAAAcAAAAHAAAAEwAJ
+ABUADgAPAAAACwAQAAAAAAAAAAAAAAAAAAUAGAACAAIAAgAAAAIAGAAXAAAAGAAAABYAFgACABYAA
+gAWAAAAEQADAAoAFAAMAA0ABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASAAAAEgAGAAEAHgAkAC
+YAJwApACoALQAuAC8AMgAzADcAOAA5ADoAPAA9AEUASABOAE8AUgBTAFUAVwBZAFoAWwBcAF0AcwA
+AAAAAAQAAAADa3tfFAAAAANAan9kAAAAA4QodoQ==
+"""
+ )
+ ),
+ 10 if size is None else size,
+ layout_engine=Layout.BASIC,
+ )
+ else:
+ f = ImageFont()
+ f._load_pilfont_data(
+ # courB08
+ BytesIO(
+ base64.b64decode(
+ b"""
+UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAA//8AAQAAAAAAAAABAAEA
+BgAAAAH/+gADAAAAAQAAAAMABgAGAAAAAf/6AAT//QADAAAABgADAAYAAAAA//kABQABAAYAAAAL
+AAgABgAAAAD/+AAFAAEACwAAABAACQAGAAAAAP/5AAUAAAAQAAAAFQAHAAYAAP////oABQAAABUA
+AAAbAAYABgAAAAH/+QAE//wAGwAAAB4AAwAGAAAAAf/5AAQAAQAeAAAAIQAIAAYAAAAB//kABAAB
+ACEAAAAkAAgABgAAAAD/+QAE//0AJAAAACgABAAGAAAAAP/6AAX//wAoAAAALQAFAAYAAAAB//8A
+BAACAC0AAAAwAAMABgAAAAD//AAF//0AMAAAADUAAQAGAAAAAf//AAMAAAA1AAAANwABAAYAAAAB
+//kABQABADcAAAA7AAgABgAAAAD/+QAFAAAAOwAAAEAABwAGAAAAAP/5AAYAAABAAAAARgAHAAYA
+AAAA//kABQAAAEYAAABLAAcABgAAAAD/+QAFAAAASwAAAFAABwAGAAAAAP/5AAYAAABQAAAAVgAH
+AAYAAAAA//kABQAAAFYAAABbAAcABgAAAAD/+QAFAAAAWwAAAGAABwAGAAAAAP/5AAUAAABgAAAA
+ZQAHAAYAAAAA//kABQAAAGUAAABqAAcABgAAAAD/+QAFAAAAagAAAG8ABwAGAAAAAf/8AAMAAABv
+AAAAcQAEAAYAAAAA//wAAwACAHEAAAB0AAYABgAAAAD/+gAE//8AdAAAAHgABQAGAAAAAP/7AAT/
+/gB4AAAAfAADAAYAAAAB//oABf//AHwAAACAAAUABgAAAAD/+gAFAAAAgAAAAIUABgAGAAAAAP/5
+AAYAAQCFAAAAiwAIAAYAAP////oABgAAAIsAAACSAAYABgAA////+gAFAAAAkgAAAJgABgAGAAAA
+AP/6AAUAAACYAAAAnQAGAAYAAP////oABQAAAJ0AAACjAAYABgAA////+gAFAAAAowAAAKkABgAG
+AAD////6AAUAAACpAAAArwAGAAYAAAAA//oABQAAAK8AAAC0AAYABgAA////+gAGAAAAtAAAALsA
+BgAGAAAAAP/6AAQAAAC7AAAAvwAGAAYAAP////oABQAAAL8AAADFAAYABgAA////+gAGAAAAxQAA
+AMwABgAGAAD////6AAUAAADMAAAA0gAGAAYAAP////oABQAAANIAAADYAAYABgAA////+gAGAAAA
+2AAAAN8ABgAGAAAAAP/6AAUAAADfAAAA5AAGAAYAAP////oABQAAAOQAAADqAAYABgAAAAD/+gAF
+AAEA6gAAAO8ABwAGAAD////6AAYAAADvAAAA9gAGAAYAAAAA//oABQAAAPYAAAD7AAYABgAA////
++gAFAAAA+wAAAQEABgAGAAD////6AAYAAAEBAAABCAAGAAYAAP////oABgAAAQgAAAEPAAYABgAA
+////+gAGAAABDwAAARYABgAGAAAAAP/6AAYAAAEWAAABHAAGAAYAAP////oABgAAARwAAAEjAAYA
+BgAAAAD/+gAFAAABIwAAASgABgAGAAAAAf/5AAQAAQEoAAABKwAIAAYAAAAA//kABAABASsAAAEv
+AAgABgAAAAH/+QAEAAEBLwAAATIACAAGAAAAAP/5AAX//AEyAAABNwADAAYAAAAAAAEABgACATcA
+AAE9AAEABgAAAAH/+QAE//wBPQAAAUAAAwAGAAAAAP/7AAYAAAFAAAABRgAFAAYAAP////kABQAA
+AUYAAAFMAAcABgAAAAD/+wAFAAABTAAAAVEABQAGAAAAAP/5AAYAAAFRAAABVwAHAAYAAAAA//sA
+BQAAAVcAAAFcAAUABgAAAAD/+QAFAAABXAAAAWEABwAGAAAAAP/7AAYAAgFhAAABZwAHAAYAAP//
+//kABQAAAWcAAAFtAAcABgAAAAD/+QAGAAABbQAAAXMABwAGAAAAAP/5AAQAAgFzAAABdwAJAAYA
+AP////kABgAAAXcAAAF+AAcABgAAAAD/+QAGAAABfgAAAYQABwAGAAD////7AAUAAAGEAAABigAF
+AAYAAP////sABQAAAYoAAAGQAAUABgAAAAD/+wAFAAABkAAAAZUABQAGAAD////7AAUAAgGVAAAB
+mwAHAAYAAAAA//sABgACAZsAAAGhAAcABgAAAAD/+wAGAAABoQAAAacABQAGAAAAAP/7AAYAAAGn
+AAABrQAFAAYAAAAA//kABgAAAa0AAAGzAAcABgAA////+wAGAAABswAAAboABQAGAAD////7AAUA
+AAG6AAABwAAFAAYAAP////sABgAAAcAAAAHHAAUABgAAAAD/+wAGAAABxwAAAc0ABQAGAAD////7
+AAYAAgHNAAAB1AAHAAYAAAAA//sABQAAAdQAAAHZAAUABgAAAAH/+QAFAAEB2QAAAd0ACAAGAAAA
+Av/6AAMAAQHdAAAB3gAHAAYAAAAA//kABAABAd4AAAHiAAgABgAAAAD/+wAF//0B4gAAAecAAgAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAB
+//sAAwACAecAAAHpAAcABgAAAAD/+QAFAAEB6QAAAe4ACAAGAAAAAP/5AAYAAAHuAAAB9AAHAAYA
+AAAA//oABf//AfQAAAH5AAUABgAAAAD/+QAGAAAB+QAAAf8ABwAGAAAAAv/5AAMAAgH/AAACAAAJ
+AAYAAAAA//kABQABAgAAAAIFAAgABgAAAAH/+gAE//sCBQAAAggAAQAGAAAAAP/5AAYAAAIIAAAC
+DgAHAAYAAAAB//kABf/+Ag4AAAISAAUABgAA////+wAGAAACEgAAAhkABQAGAAAAAP/7AAX//gIZ
+AAACHgADAAYAAAAA//wABf/9Ah4AAAIjAAEABgAAAAD/+QAHAAACIwAAAioABwAGAAAAAP/6AAT/
++wIqAAACLgABAAYAAAAA//kABP/8Ai4AAAIyAAMABgAAAAD/+gAFAAACMgAAAjcABgAGAAAAAf/5
+AAT//QI3AAACOgAEAAYAAAAB//kABP/9AjoAAAI9AAQABgAAAAL/+QAE//sCPQAAAj8AAgAGAAD/
+///7AAYAAgI/AAACRgAHAAYAAAAA//kABgABAkYAAAJMAAgABgAAAAH//AAD//0CTAAAAk4AAQAG
+AAAAAf//AAQAAgJOAAACUQADAAYAAAAB//kABP/9AlEAAAJUAAQABgAAAAH/+QAF//4CVAAAAlgA
+BQAGAAD////7AAYAAAJYAAACXwAFAAYAAP////kABgAAAl8AAAJmAAcABgAA////+QAGAAACZgAA
+Am0ABwAGAAD////5AAYAAAJtAAACdAAHAAYAAAAA//sABQACAnQAAAJ5AAcABgAA////9wAGAAAC
+eQAAAoAACQAGAAD////3AAYAAAKAAAAChwAJAAYAAP////cABgAAAocAAAKOAAkABgAA////9wAG
+AAACjgAAApUACQAGAAD////4AAYAAAKVAAACnAAIAAYAAP////cABgAAApwAAAKjAAkABgAA////
++gAGAAACowAAAqoABgAGAAAAAP/6AAUAAgKqAAACrwAIAAYAAP////cABQAAAq8AAAK1AAkABgAA
+////9wAFAAACtQAAArsACQAGAAD////3AAUAAAK7AAACwQAJAAYAAP////gABQAAAsEAAALHAAgA
+BgAAAAD/9wAEAAACxwAAAssACQAGAAAAAP/3AAQAAALLAAACzwAJAAYAAAAA//cABAAAAs8AAALT
+AAkABgAAAAD/+AAEAAAC0wAAAtcACAAGAAD////6AAUAAALXAAAC3QAGAAYAAP////cABgAAAt0A
+AALkAAkABgAAAAD/9wAFAAAC5AAAAukACQAGAAAAAP/3AAUAAALpAAAC7gAJAAYAAAAA//cABQAA
+Au4AAALzAAkABgAAAAD/9wAFAAAC8wAAAvgACQAGAAAAAP/4AAUAAAL4AAAC/QAIAAYAAAAA//oA
+Bf//Av0AAAMCAAUABgAA////+gAGAAADAgAAAwkABgAGAAD////3AAYAAAMJAAADEAAJAAYAAP//
+//cABgAAAxAAAAMXAAkABgAA////9wAGAAADFwAAAx4ACQAGAAD////4AAYAAAAAAAoABwASAAYA
+AP////cABgAAAAcACgAOABMABgAA////+gAFAAAADgAKABQAEAAGAAD////6AAYAAAAUAAoAGwAQ
+AAYAAAAA//gABgAAABsACgAhABIABgAAAAD/+AAGAAAAIQAKACcAEgAGAAAAAP/4AAYAAAAnAAoA
+LQASAAYAAAAA//gABgAAAC0ACgAzABIABgAAAAD/+QAGAAAAMwAKADkAEQAGAAAAAP/3AAYAAAA5
+AAoAPwATAAYAAP////sABQAAAD8ACgBFAA8ABgAAAAD/+wAFAAIARQAKAEoAEQAGAAAAAP/4AAUA
+AABKAAoATwASAAYAAAAA//gABQAAAE8ACgBUABIABgAAAAD/+AAFAAAAVAAKAFkAEgAGAAAAAP/5
+AAUAAABZAAoAXgARAAYAAAAA//gABgAAAF4ACgBkABIABgAAAAD/+AAGAAAAZAAKAGoAEgAGAAAA
+AP/4AAYAAABqAAoAcAASAAYAAAAA//kABgAAAHAACgB2ABEABgAAAAD/+AAFAAAAdgAKAHsAEgAG
+AAD////4AAYAAAB7AAoAggASAAYAAAAA//gABQAAAIIACgCHABIABgAAAAD/+AAFAAAAhwAKAIwA
+EgAGAAAAAP/4AAUAAACMAAoAkQASAAYAAAAA//gABQAAAJEACgCWABIABgAAAAD/+QAFAAAAlgAK
+AJsAEQAGAAAAAP/6AAX//wCbAAoAoAAPAAYAAAAA//oABQABAKAACgClABEABgAA////+AAGAAAA
+pQAKAKwAEgAGAAD////4AAYAAACsAAoAswASAAYAAP////gABgAAALMACgC6ABIABgAA////+QAG
+AAAAugAKAMEAEQAGAAD////4AAYAAgDBAAoAyAAUAAYAAP////kABQACAMgACgDOABMABgAA////
++QAGAAIAzgAKANUAEw==
+"""
+ )
+ ),
+ Image.open(
+ BytesIO(
+ base64.b64decode(
+ b"""
+iVBORw0KGgoAAAANSUhEUgAAAx4AAAAUAQAAAAArMtZoAAAEwElEQVR4nABlAJr/AHVE4czCI/4u
+Mc4b7vuds/xzjz5/3/7u/n9vMe7vnfH/9++vPn/xyf5zhxzjt8GHw8+2d83u8x27199/nxuQ6Od9
+M43/5z2I+9n9ZtmDBwMQECDRQw/eQIQohJXxpBCNVE6QCCAAAAD//wBlAJr/AgALyj1t/wINwq0g
+LeNZUworuN1cjTPIzrTX6ofHWeo3v336qPzfEwRmBnHTtf95/fglZK5N0PDgfRTslpGBvz7LFc4F
+IUXBWQGjQ5MGCx34EDFPwXiY4YbYxavpnhHFrk14CDAAAAD//wBlAJr/AgKqRooH2gAgPeggvUAA
+Bu2WfgPoAwzRAABAAAAAAACQgLz/3Uv4Gv+gX7BJgDeeGP6AAAD1NMDzKHD7ANWr3loYbxsAD791
+NAADfcoIDyP44K/jv4Y63/Z+t98Ovt+ub4T48LAAAAD//wBlAJr/AuplMlADJAAAAGuAphWpqhMx
+in0A/fRvAYBABPgBwBUgABBQ/sYAyv9g0bCHgOLoGAAAAAAAREAAwI7nr0ArYpow7aX8//9LaP/9
+SjdavWA8ePHeBIKB//81/83ndznOaXx379wAAAD//wBlAJr/AqDxW+D3AABAAbUh/QMnbQag/gAY
+AYDAAACgtgD/gOqAAAB5IA/8AAAk+n9w0AAA8AAAmFRJuPo27ciC0cD5oeW4E7KA/wD3ECMAn2tt
+y8PgwH8AfAxFzC0JzeAMtratAsC/ffwAAAD//wBlAJr/BGKAyCAA4AAAAvgeYTAwHd1kmQF5chkG
+ABoMIHcL5xVpTfQbUqzlAAAErwAQBgAAEOClA5D9il08AEh/tUzdCBsXkbgACED+woQg8Si9VeqY
+lODCn7lmF6NhnAEYgAAA/NMIAAAAAAD//2JgjLZgVGBg5Pv/Tvpc8hwGBjYGJADjHDrAwPzAjv/H
+/Wf3PzCwtzcwHmBgYGcwbZz8wHaCAQMDOwMDQ8MCBgYOC3W7mp+f0w+wHOYxO3OG+e376hsMZjk3
+AAAAAP//YmCMY2A4wMAIN5e5gQETPD6AZisDAwMDgzSDAAPjByiHcQMDAwMDg1nOze1lByRu5/47
+c4859311AYNZzg0AAAAA//9iYGDBYihOIIMuwIjGL39/fwffA8b//xv/P2BPtzzHwCBjUQAAAAD/
+/yLFBrIBAAAA//9i1HhcwdhizX7u8NZNzyLbvT97bfrMf/QHI8evOwcSqGUJAAAA//9iYBB81iSw
+pEE170Qrg5MIYydHqwdDQRMrAwcVrQAAAAD//2J4x7j9AAMDn8Q/BgYLBoaiAwwMjPdvMDBYM1Tv
+oJodAAAAAP//Yqo/83+dxePWlxl3npsel9lvLfPcqlE9725C+acfVLMEAAAA//9i+s9gwCoaaGMR
+evta/58PTEWzr21hufPjA8N+qlnBwAAAAAD//2JiWLci5v1+HmFXDqcnULE/MxgYGBj+f6CaJQAA
+AAD//2Ji2FrkY3iYpYC5qDeGgeEMAwPDvwQBBoYvcTwOVLMEAAAA//9isDBgkP///0EOg9z35v//
+Gc/eeW7BwPj5+QGZhANUswMAAAD//2JgqGBgYGBgqEMXlvhMPUsAAAAA//8iYDd1AAAAAP//AwDR
+w7IkEbzhVQAAAABJRU5ErkJggg==
+"""
+ )
+ )
+ ),
+ )
+ return f
diff --git a/Lib/site-packages/PIL/ImageGrab.py b/Lib/site-packages/PIL/ImageGrab.py
new file mode 100644
index 0000000..a4993d3
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageGrab.py
@@ -0,0 +1,178 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# screen grabber
+#
+# History:
+# 2001-04-26 fl created
+# 2001-09-17 fl use builtin driver, if present
+# 2002-11-19 fl added grabclipboard support
+#
+# Copyright (c) 2001-2002 by Secret Labs AB
+# Copyright (c) 2001-2002 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import io
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from . import Image
+
+
+def grab(bbox=None, include_layered_windows=False, all_screens=False, xdisplay=None):
+ if xdisplay is None:
+ if sys.platform == "darwin":
+ fh, filepath = tempfile.mkstemp(".png")
+ os.close(fh)
+ args = ["screencapture"]
+ if bbox:
+ left, top, right, bottom = bbox
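+ # screencapture's -R flag takes the region as x,y,width,height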
+ args += ["-R", f"{left},{top},{right-left},{bottom-top}"]
+ subprocess.call(args + ["-x", filepath])
+ im = Image.open(filepath)
+ im.load()
+ os.unlink(filepath)
+ if bbox:
+ im_resized = im.resize((right - left, bottom - top))
+ im.close()
+ return im_resized
+ return im
+ elif sys.platform == "win32":
+ offset, size, data = Image.core.grabscreen_win32(
+ include_layered_windows, all_screens
+ )
+ im = Image.frombytes(
+ "RGB",
+ size,
+ data,
+ # RGB, 32-bit line padding, origin lower left corner
+ "raw",
+ "BGR",
+ (size[0] * 3 + 3) & -4,
+ -1,
+ )
+ if bbox:
+ x0, y0 = offset
+ left, top, right, bottom = bbox
+ im = im.crop((left - x0, top - y0, right - x0, bottom - y0))
+ return im
+ try:
+ if not Image.core.HAVE_XCB:
+ msg = "Pillow was built without XCB support"
+ raise OSError(msg)
+ size, data = Image.core.grabscreen_x11(xdisplay)
+ except OSError:
+ if (
+ xdisplay is None
+ and sys.platform not in ("darwin", "win32")
+ and shutil.which("gnome-screenshot")
+ ):
+ fh, filepath = tempfile.mkstemp(".png")
+ os.close(fh)
+ subprocess.call(["gnome-screenshot", "-f", filepath])
+ im = Image.open(filepath)
+ im.load()
+ os.unlink(filepath)
+ if bbox:
+ im_cropped = im.crop(bbox)
+ im.close()
+ return im_cropped
+ return im
+ else:
+ raise
+ else:
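+ # the XCB screen grab returns 32-bit BGRX scanlines, top row first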
+ im = Image.frombytes("RGB", size, data, "raw", "BGRX", size[0] * 4, 1)
+ if bbox:
+ im = im.crop(bbox)
+ return im
+
+
+def grabclipboard():
+ if sys.platform == "darwin":
+ fh, filepath = tempfile.mkstemp(".png")
+ os.close(fh)
+ commands = [
+ 'set theFile to (open for access POSIX file "'
+ + filepath
+ + '" with write permission)',
+ "try",
+ " write (the clipboard as «class PNGf») to theFile",
+ "end try",
+ "close access theFile",
+ ]
+ script = ["osascript"]
+ for command in commands:
+ script += ["-e", command]
+ subprocess.call(script)
+
+ im = None
+ if os.stat(filepath).st_size != 0:
+ im = Image.open(filepath)
+ im.load()
+ os.unlink(filepath)
+ return im
+ elif sys.platform == "win32":
+ fmt, data = Image.core.grabclipboard_win32()
+ if fmt == "file": # CF_HDROP
+ import struct
+
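+ # the CF_HDROP payload is a DROPFILES struct: the first DWORD is the
+ # offset of the file-name list, and byte 16 is the fWide flag
+ # (non-zero means the names are UTF-16 encoded)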
+ o = struct.unpack_from("I", data)[0]
+ if data[16] != 0:
+ files = data[o:].decode("utf-16le").split("\0")
+ else:
+ files = data[o:].decode("mbcs").split("\0")
+ return files[: files.index("")]
+ if isinstance(data, bytes):
+ data = io.BytesIO(data)
+ if fmt == "png":
+ from . import PngImagePlugin
+
+ return PngImagePlugin.PngImageFile(data)
+ elif fmt == "DIB":
+ from . import BmpImagePlugin
+
+ return BmpImagePlugin.DibImageFile(data)
+ return None
+ else:
+ if os.getenv("WAYLAND_DISPLAY"):
+ session_type = "wayland"
+ elif os.getenv("DISPLAY"):
+ session_type = "x11"
+ else: # Session type check failed
+ session_type = None
+
+ if shutil.which("wl-paste") and session_type in ("wayland", None):
+ output = subprocess.check_output(["wl-paste", "-l"]).decode()
+ mimetypes = output.splitlines()
+ if "image/png" in mimetypes:
+ mimetype = "image/png"
+ elif mimetypes:
+ mimetype = mimetypes[0]
+ else:
+ mimetype = None
+
+ args = ["wl-paste"]
+ if mimetype:
+ args.extend(["-t", mimetype])
+ elif shutil.which("xclip") and session_type in ("x11", None):
+ args = ["xclip", "-selection", "clipboard", "-t", "image/png", "-o"]
+ else:
+ msg = "wl-paste or xclip is required for ImageGrab.grabclipboard() on Linux"
+ raise NotImplementedError(msg)
+
+ p = subprocess.run(args, capture_output=True)
+ err = p.stderr
+ if err:
+ msg = f"{args[0]} error: {err.strip().decode()}"
+ raise ChildProcessError(msg)
+ data = io.BytesIO(p.stdout)
+ im = Image.open(data)
+ im.load()
+ return im
diff --git a/Lib/site-packages/PIL/ImageMath.py b/Lib/site-packages/PIL/ImageMath.py
new file mode 100644
index 0000000..b77f4bc
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageMath.py
@@ -0,0 +1,265 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# a simple math add-on for the Python Imaging Library
+#
+# History:
+# 1999-02-15 fl Original PIL Plus release
+# 2005-05-05 fl Simplified and cleaned up for PIL 1.1.6
+# 2005-09-12 fl Fixed int() and float() for Python 2.4.1
+#
+# Copyright (c) 1999-2005 by Secret Labs AB
+# Copyright (c) 2005 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import builtins
+
+from . import Image, _imagingmath
+
+
+class _Operand:
+ """Wraps an image operand, providing standard operators"""
+
+ def __init__(self, im):
+ self.im = im
+
+ def __fixup(self, im1):
+ # convert image to suitable mode
+ if isinstance(im1, _Operand):
+ # argument was an image.
+ if im1.im.mode in ("1", "L"):
+ return im1.im.convert("I")
+ elif im1.im.mode in ("I", "F"):
+ return im1.im
+ else:
+ msg = f"unsupported mode: {im1.im.mode}"
+ raise ValueError(msg)
+ else:
+ # argument was a constant
+ if isinstance(im1, (int, float)) and self.im.mode in ("1", "L", "I"):
+ return Image.new("I", self.im.size, im1)
+ else:
+ return Image.new("F", self.im.size, im1)
+
+ def apply(self, op, im1, im2=None, mode=None):
+ im1 = self.__fixup(im1)
+ if im2 is None:
+ # unary operation
+ out = Image.new(mode or im1.mode, im1.size, None)
+ im1.load()
+ try:
+ op = getattr(_imagingmath, op + "_" + im1.mode)
+ except AttributeError as e:
+ msg = f"bad operand type for '{op}'"
+ raise TypeError(msg) from e
+ _imagingmath.unop(op, out.im.id, im1.im.id)
+ else:
+ # binary operation
+ im2 = self.__fixup(im2)
+ if im1.mode != im2.mode:
+ # convert both arguments to floating point
+ if im1.mode != "F":
+ im1 = im1.convert("F")
+ if im2.mode != "F":
+ im2 = im2.convert("F")
+ if im1.size != im2.size:
+ # crop both arguments to a common size
+ size = (min(im1.size[0], im2.size[0]), min(im1.size[1], im2.size[1]))
+ if im1.size != size:
+ im1 = im1.crop((0, 0) + size)
+ if im2.size != size:
+ im2 = im2.crop((0, 0) + size)
+ out = Image.new(mode or im1.mode, im1.size, None)
+ im1.load()
+ im2.load()
+ try:
+ op = getattr(_imagingmath, op + "_" + im1.mode)
+ except AttributeError as e:
+ msg = f"bad operand type for '{op}'"
+ raise TypeError(msg) from e
+ _imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id)
+ return _Operand(out)
+
+ # unary operators
+ def __bool__(self):
+ # an image is "true" if it contains at least one non-zero pixel
+ return self.im.getbbox() is not None
+
+ def __abs__(self):
+ return self.apply("abs", self)
+
+ def __pos__(self):
+ return self
+
+ def __neg__(self):
+ return self.apply("neg", self)
+
+ # binary operators
+ def __add__(self, other):
+ return self.apply("add", self, other)
+
+ def __radd__(self, other):
+ return self.apply("add", other, self)
+
+ def __sub__(self, other):
+ return self.apply("sub", self, other)
+
+ def __rsub__(self, other):
+ return self.apply("sub", other, self)
+
+ def __mul__(self, other):
+ return self.apply("mul", self, other)
+
+ def __rmul__(self, other):
+ return self.apply("mul", other, self)
+
+ def __truediv__(self, other):
+ return self.apply("div", self, other)
+
+ def __rtruediv__(self, other):
+ return self.apply("div", other, self)
+
+ def __mod__(self, other):
+ return self.apply("mod", self, other)
+
+ def __rmod__(self, other):
+ return self.apply("mod", other, self)
+
+ def __pow__(self, other):
+ return self.apply("pow", self, other)
+
+ def __rpow__(self, other):
+ return self.apply("pow", other, self)
+
+ # bitwise
+ def __invert__(self):
+ return self.apply("invert", self)
+
+ def __and__(self, other):
+ return self.apply("and", self, other)
+
+ def __rand__(self, other):
+ return self.apply("and", other, self)
+
+ def __or__(self, other):
+ return self.apply("or", self, other)
+
+ def __ror__(self, other):
+ return self.apply("or", other, self)
+
+ def __xor__(self, other):
+ return self.apply("xor", self, other)
+
+ def __rxor__(self, other):
+ return self.apply("xor", other, self)
+
+ def __lshift__(self, other):
+ return self.apply("lshift", self, other)
+
+ def __rshift__(self, other):
+ return self.apply("rshift", self, other)
+
+ # logical
+ def __eq__(self, other):
+ return self.apply("eq", self, other)
+
+ def __ne__(self, other):
+ return self.apply("ne", self, other)
+
+ def __lt__(self, other):
+ return self.apply("lt", self, other)
+
+ def __le__(self, other):
+ return self.apply("le", self, other)
+
+ def __gt__(self, other):
+ return self.apply("gt", self, other)
+
+ def __ge__(self, other):
+ return self.apply("ge", self, other)
+
+
+# conversions
+def imagemath_int(self):
+ return _Operand(self.im.convert("I"))
+
+
+def imagemath_float(self):
+ return _Operand(self.im.convert("F"))
+
+
+# logical
+def imagemath_equal(self, other):
+ return self.apply("eq", self, other, mode="I")
+
+
+def imagemath_notequal(self, other):
+ return self.apply("ne", self, other, mode="I")
+
+
+def imagemath_min(self, other):
+ return self.apply("min", self, other)
+
+
+def imagemath_max(self, other):
+ return self.apply("max", self, other)
+
+
+def imagemath_convert(self, mode):
+ return _Operand(self.im.convert(mode))
+
+
+ops = {}
+for k, v in list(globals().items()):
+ if k[:10] == "imagemath_":
+ ops[k[10:]] = v
+
+
+def eval(expression, _dict={}, **kw):
+ """
+ Evaluates an image expression.
+
+ :param expression: A string containing a Python-style expression.
+ :param _dict: Values to add to the evaluation context, passed as a
+ dictionary.
+ :param kw: Values to add to the evaluation context, passed as
+ keyword arguments.
+ :return: The evaluated expression. This is usually an image object, but can
+ also be an integer, a floating point value, or a pixel tuple,
+ depending on the expression.
+ """
+
+ # build execution namespace
+ args = ops.copy()
+ for k in list(_dict.keys()) + list(kw.keys()):
+ if "__" in k or hasattr(builtins, k):
+ msg = f"'{k}' not allowed"
+ raise ValueError(msg)
+
+ args.update(_dict)
+ args.update(kw)
+ for k, v in args.items():
+ if hasattr(v, "im"):
+ args[k] = _Operand(v)
+
+ compiled_code = compile(expression, "<string>", "eval")
+
+ def scan(code):
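+ # reject any name that is not a supplied operand or the whitelisted
+ # abs(), recursing into nested code objects such as lambdas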
+ for const in code.co_consts:
+ if type(const) is type(compiled_code):
+ scan(const)
+
+ for name in code.co_names:
+ if name not in args and name != "abs":
+ msg = f"'{name}' not allowed"
+ raise ValueError(msg)
+
+ scan(compiled_code)
+ out = builtins.eval(expression, {"__builtins": {"abs": abs}}, args)
+ try:
+ return out.im
+ except AttributeError:
+ return out
diff --git a/Lib/site-packages/PIL/ImageMode.py b/Lib/site-packages/PIL/ImageMode.py
new file mode 100644
index 0000000..0b31f60
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageMode.py
@@ -0,0 +1,96 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# standard mode descriptors
+#
+# History:
+# 2006-03-20 fl Added
+#
+# Copyright (c) 2006 by Secret Labs AB.
+# Copyright (c) 2006 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import sys
+from functools import lru_cache
+
+
+class ModeDescriptor:
+ """Wrapper for mode strings."""
+
+ def __init__(
+ self,
+ mode: str,
+ bands: tuple[str, ...],
+ basemode: str,
+ basetype: str,
+ typestr: str,
+ ) -> None:
+ self.mode = mode
+ self.bands = bands
+ self.basemode = basemode
+ self.basetype = basetype
+ self.typestr = typestr
+
+ def __str__(self) -> str:
+ return self.mode
+
+
+@lru_cache
+def getmode(mode: str) -> ModeDescriptor:
+ """Gets a mode descriptor for the given mode."""
+ # initialize mode cache
+ endian = "<" if sys.byteorder == "little" else ">"
+
+ modes = {
+ # core modes
+ # Bits need to be extended to bytes
+ "1": ("L", "L", ("1",), "|b1"),
+ "L": ("L", "L", ("L",), "|u1"),
+ "I": ("L", "I", ("I",), endian + "i4"),
+ "F": ("L", "F", ("F",), endian + "f4"),
+ "P": ("P", "L", ("P",), "|u1"),
+ "RGB": ("RGB", "L", ("R", "G", "B"), "|u1"),
+ "RGBX": ("RGB", "L", ("R", "G", "B", "X"), "|u1"),
+ "RGBA": ("RGB", "L", ("R", "G", "B", "A"), "|u1"),
+ "CMYK": ("RGB", "L", ("C", "M", "Y", "K"), "|u1"),
+ "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr"), "|u1"),
+ # UNDONE - unsigned |u1i1i1
+ "LAB": ("RGB", "L", ("L", "A", "B"), "|u1"),
+ "HSV": ("RGB", "L", ("H", "S", "V"), "|u1"),
+ # extra experimental modes
+ "RGBa": ("RGB", "L", ("R", "G", "B", "a"), "|u1"),
+ "BGR;15": ("RGB", "L", ("B", "G", "R"), "|u1"),
+ "BGR;16": ("RGB", "L", ("B", "G", "R"), "|u1"),
+ "BGR;24": ("RGB", "L", ("B", "G", "R"), "|u1"),
+ "LA": ("L", "L", ("L", "A"), "|u1"),
+ "La": ("L", "L", ("L", "a"), "|u1"),
+ "PA": ("RGB", "L", ("P", "A"), "|u1"),
+ }
+ if mode in modes:
+ base_mode, base_type, bands, type_str = modes[mode]
+ return ModeDescriptor(mode, bands, base_mode, base_type, type_str)
+
+ mapping_modes = {
+ # I;16 == I;16L, and I;32 == I;32L
+ "I;16": "u2",
+ "I;16BS": ">i2",
+ "I;16N": endian + "u2",
+ "I;16NS": endian + "i2",
+ "I;32": "u4",
+ "I;32L": "i4",
+ "I;32LS": "
+from __future__ import annotations
+
+import re
+
+from . import Image, _imagingmorph
+
+LUT_SIZE = 1 << 9
+
+# fmt: off
+ROTATION_MATRIX = [
+ 6, 3, 0,
+ 7, 4, 1,
+ 8, 5, 2,
+]
+MIRROR_MATRIX = [
+ 2, 1, 0,
+ 5, 4, 3,
+ 8, 7, 6,
+]
+# fmt: on
+
+
+class LutBuilder:
+ """A class for building a MorphLut from a descriptive language
+
+ The input patterns are a list of string sequences like this::
+
+ 4:(...
+ .1.
+ 111)->1
+
+ (whitespace, including line breaks, is ignored). The option 4
+ describes a series of symmetry operations (in this case a
+ 4-rotation); the pattern is described by:
+
+ - . or X - Ignore
+ - 1 - Pixel is on
+ - 0 - Pixel is off
+
+ The result of the operation is described after the "->" string.
+
+ The default is to return the current pixel value, which is used
+ when no other pattern matches.
+
+ Operations:
+
+ - 4 - 4 way rotation
+ - N - Negate
+ - 1 - Dummy op for no other operation (an op must always be given)
+ - M - Mirroring
+
+ Example::
+
+ lb = LutBuilder(patterns = ["4:(... .1. 111)->1"])
+ lut = lb.build_lut()
+
+ """
+
+ def __init__(self, patterns=None, op_name=None):
+ if patterns is not None:
+ self.patterns = patterns
+ else:
+ self.patterns = []
+ self.lut = None
+ if op_name is not None:
+ known_patterns = {
+ "corner": ["1:(... ... ...)->0", "4:(00. 01. ...)->1"],
+ "dilation4": ["4:(... .0. .1.)->1"],
+ "dilation8": ["4:(... .0. .1.)->1", "4:(... .0. ..1)->1"],
+ "erosion4": ["4:(... .1. .0.)->0"],
+ "erosion8": ["4:(... .1. .0.)->0", "4:(... .1. ..0)->0"],
+ "edge": [
+ "1:(... ... ...)->0",
+ "4:(.0. .1. ...)->1",
+ "4:(01. .1. ...)->1",
+ ],
+ }
+ if op_name not in known_patterns:
+ msg = "Unknown pattern " + op_name + "!"
+ raise Exception(msg)
+
+ self.patterns = known_patterns[op_name]
+
+ def add_patterns(self, patterns):
+ self.patterns += patterns
+
+ def build_default_lut(self):
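+ # the default LUT returns the centre pixel of each 3x3 neighbourhood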
+ symbols = [0, 1]
+ m = 1 << 4 # pos of current pixel
+ self.lut = bytearray(symbols[(i & m) > 0] for i in range(LUT_SIZE))
+
+ def get_lut(self):
+ return self.lut
+
+ def _string_permute(self, pattern, permutation):
+ """string_permute takes a pattern and a permutation and returns the
+ string permuted according to the permutation list.
+ """
+ assert len(permutation) == 9
+ return "".join(pattern[p] for p in permutation)
+
+ def _pattern_permute(self, basic_pattern, options, basic_result):
+ """pattern_permute takes a basic pattern and its result and clones
+ the pattern according to the modifications described in the options
+ parameter. It returns a list of all cloned patterns."""
+ patterns = [(basic_pattern, basic_result)]
+
+ # rotations
+ if "4" in options:
+ res = patterns[-1][1]
+ for i in range(4):
+ patterns.append(
+ (self._string_permute(patterns[-1][0], ROTATION_MATRIX), res)
+ )
+ # mirror
+ if "M" in options:
+ n = len(patterns)
+ for pattern, res in patterns[:n]:
+ patterns.append((self._string_permute(pattern, MIRROR_MATRIX), res))
+
+ # negate
+ if "N" in options:
+ n = len(patterns)
+ for pattern, res in patterns[:n]:
+ # Swap 0 and 1
+ pattern = pattern.replace("0", "Z").replace("1", "0").replace("Z", "1")
+ res = 1 - int(res)
+ patterns.append((pattern, res))
+
+ return patterns
+
+ def build_lut(self):
+ """Compile all patterns into a morphology lut.
+
+ TBD :Build based on (file) morphlut:modify_lut
+ """
+ self.build_default_lut()
+ patterns = []
+
+ # Parse and create symmetries of the patterns strings
+ for p in self.patterns:
+ m = re.search(r"(\w*):?\s*\((.+?)\)\s*->\s*(\d)", p.replace("\n", ""))
+ if not m:
+ msg = 'Syntax error in pattern "' + p + '"'
+ raise Exception(msg)
+ options = m.group(1)
+ pattern = m.group(2)
+ result = int(m.group(3))
+
+ # Get rid of spaces
+ pattern = pattern.replace(" ", "").replace("\n", "")
+
+ patterns += self._pattern_permute(pattern, options, result)
+
+ # compile the patterns into regular expressions for speed
+ for i, pattern in enumerate(patterns):
+ p = pattern[0].replace(".", "X").replace("X", "[01]")
+ p = re.compile(p)
+ patterns[i] = (p, pattern[1])
+
+ # Step through table and find patterns that match.
+ # Note that all the patterns are searched; the last one
+ # that matches overrides any earlier result
+ for i in range(LUT_SIZE):
+ # Build the bit pattern
+ bitpattern = bin(i)[2:]
+ bitpattern = ("0" * (9 - len(bitpattern)) + bitpattern)[::-1]
+
+ for p, r in patterns:
+ if p.match(bitpattern):
+ self.lut[i] = [0, 1][r]
+
+ return self.lut
+
+
+class MorphOp:
+ """A class for binary morphological operators"""
+
+ def __init__(self, lut=None, op_name=None, patterns=None):
+ """Create a binary morphological operator"""
+ self.lut = lut
+ if op_name is not None:
+ self.lut = LutBuilder(op_name=op_name).build_lut()
+ elif patterns is not None:
+ self.lut = LutBuilder(patterns=patterns).build_lut()
+
+ def apply(self, image):
+ """Run a single morphological operation on an image
+
+ Returns a tuple of the number of changed pixels and the
+ morphed image"""
+ if self.lut is None:
+ msg = "No operator loaded"
+ raise Exception(msg)
+
+ if image.mode != "L":
+ msg = "Image mode must be L"
+ raise ValueError(msg)
+ outimage = Image.new(image.mode, image.size, None)
+ count = _imagingmorph.apply(bytes(self.lut), image.im.id, outimage.im.id)
+ return count, outimage
+
+ def match(self, image):
+ """Get a list of coordinates matching the morphological operation on
+ an image.
+
+ Returns a list of tuples of (x,y) coordinates
+ of all matching pixels. See :ref:`coordinate-system`."""
+ if self.lut is None:
+ msg = "No operator loaded"
+ raise Exception(msg)
+
+ if image.mode != "L":
+ msg = "Image mode must be L"
+ raise ValueError(msg)
+ return _imagingmorph.match(bytes(self.lut), image.im.id)
+
+ def get_on_pixels(self, image):
+ """Get a list of all turned on pixels in a binary image
+
+ Returns a list of tuples of (x,y) coordinates
+ of all matching pixels. See :ref:`coordinate-system`."""
+
+ if image.mode != "L":
+ msg = "Image mode must be L"
+ raise ValueError(msg)
+ return _imagingmorph.get_on_pixels(image.im.id)
+
+ def load_lut(self, filename):
+ """Load an operator from an mrl file"""
+ with open(filename, "rb") as f:
+ self.lut = bytearray(f.read())
+
+ if len(self.lut) != LUT_SIZE:
+ self.lut = None
+ msg = "Wrong size operator file!"
+ raise Exception(msg)
+
+ def save_lut(self, filename):
+ """Save an operator to an mrl file"""
+ if self.lut is None:
+ msg = "No operator loaded"
+ raise Exception(msg)
+ with open(filename, "wb") as f:
+ f.write(self.lut)
+
+ def set_lut(self, lut):
+ """Set the lut from an external source"""
+ self.lut = lut
diff --git a/Lib/site-packages/PIL/ImageOps.py b/Lib/site-packages/PIL/ImageOps.py
new file mode 100644
index 0000000..a9e626b
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageOps.py
@@ -0,0 +1,655 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# standard image operations
+#
+# History:
+# 2001-10-20 fl Created
+# 2001-10-23 fl Added autocontrast operator
+# 2001-12-18 fl Added Kevin's fit operator
+# 2004-03-14 fl Fixed potential division by zero in equalize
+# 2005-05-05 fl Fixed equalize for low number of values
+#
+# Copyright (c) 2001-2004 by Secret Labs AB
+# Copyright (c) 2001-2004 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import functools
+import operator
+import re
+
+from . import ExifTags, Image, ImagePalette
+
+#
+# helpers
+
+
+def _border(border):
+ if isinstance(border, tuple):
+ if len(border) == 2:
+ left, top = right, bottom = border
+ elif len(border) == 4:
+ left, top, right, bottom = border
+ else:
+ left = top = right = bottom = border
+ return left, top, right, bottom
+
+
+def _color(color, mode):
+ if isinstance(color, str):
+ from . import ImageColor
+
+ color = ImageColor.getcolor(color, mode)
+ return color
+
+
+def _lut(image, lut):
+ if image.mode == "P":
+ # FIXME: apply to lookup table, not image data
+ msg = "mode P support coming soon"
+ raise NotImplementedError(msg)
+ elif image.mode in ("L", "RGB"):
+ if image.mode == "RGB" and len(lut) == 256:
+ lut = lut + lut + lut
+ return image.point(lut)
+ else:
+ msg = f"not supported for mode {image.mode}"
+ raise OSError(msg)
+
+
+#
+# actions
+
+
+def autocontrast(image, cutoff=0, ignore=None, mask=None, preserve_tone=False):
+ """
+ Maximize (normalize) image contrast. This function calculates a
+ histogram of the input image (or mask region), removes ``cutoff`` percent of the
+ lightest and darkest pixels from the histogram, and remaps the image
+ so that the darkest pixel becomes black (0), and the lightest
+ becomes white (255).
+
+ :param image: The image to process.
+ :param cutoff: The percent to cut off from the histogram on the low and
+ high ends. Either a tuple of (low, high), or a single
+ number for both.
+ :param ignore: The background pixel value (use None for no background).
+ :param mask: Histogram used in contrast operation is computed using pixels
+ within the mask. If no mask is given the entire image is used
+ for histogram computation.
+ :param preserve_tone: Preserve image tone in Photoshop-like style autocontrast.
+
+ .. versionadded:: 8.2.0
+
+ :return: An image.
+ """
+ if preserve_tone:
+ histogram = image.convert("L").histogram(mask)
+ else:
+ histogram = image.histogram(mask)
+
+ lut = []
+ for layer in range(0, len(histogram), 256):
+ h = histogram[layer : layer + 256]
+ if ignore is not None:
+ # get rid of outliers
+ try:
+ h[ignore] = 0
+ except TypeError:
+ # assume sequence
+ for ix in ignore:
+ h[ix] = 0
+ if cutoff:
+ # cut off pixels from both ends of the histogram
+ if not isinstance(cutoff, tuple):
+ cutoff = (cutoff, cutoff)
+ # get number of pixels
+ n = 0
+ for ix in range(256):
+ n = n + h[ix]
+ # remove cutoff% pixels from the low end
+ cut = n * cutoff[0] // 100
+ for lo in range(256):
+ if cut > h[lo]:
+ cut = cut - h[lo]
+ h[lo] = 0
+ else:
+ h[lo] -= cut
+ cut = 0
+ if cut <= 0:
+ break
+ # remove cutoff% samples from the high end
+ cut = n * cutoff[1] // 100
+ for hi in range(255, -1, -1):
+ if cut > h[hi]:
+ cut = cut - h[hi]
+ h[hi] = 0
+ else:
+ h[hi] -= cut
+ cut = 0
+ if cut <= 0:
+ break
+ # find lowest/highest samples after preprocessing
+ for lo in range(256):
+ if h[lo]:
+ break
+ for hi in range(255, -1, -1):
+ if h[hi]:
+ break
+ if hi <= lo:
+ # don't bother
+ lut.extend(list(range(256)))
+ else:
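+ # linear stretch: map lo to 0 and hi to 255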
+ scale = 255.0 / (hi - lo)
+ offset = -lo * scale
+ for ix in range(256):
+ ix = int(ix * scale + offset)
+ if ix < 0:
+ ix = 0
+ elif ix > 255:
+ ix = 255
+ lut.append(ix)
+ return _lut(image, lut)
+
+
+def colorize(image, black, white, mid=None, blackpoint=0, whitepoint=255, midpoint=127):
+ """
+ Colorize grayscale image.
+ This function calculates a color wedge which maps all black pixels in
+ the source image to the first color and all white pixels to the
+ second color. If ``mid`` is specified, a three-color mapping is used
+ instead. The ``black``, ``white`` and ``mid`` arguments should be RGB
+ tuples or color names.
+ Mapping positions for any of the colors can be specified
+ (e.g. ``blackpoint``), where these parameters are the integer
+ value corresponding to where the corresponding color should be mapped.
+ These parameters must have logical order, such that
+ ``blackpoint <= midpoint <= whitepoint`` (if ``mid`` is specified).
+
+ :param image: The image to colorize.
+ :param black: The color to use for black input pixels.
+ :param white: The color to use for white input pixels.
+ :param mid: The color to use for midtone input pixels.
+ :param blackpoint: an int value [0, 255] for the black mapping.
+ :param whitepoint: an int value [0, 255] for the white mapping.
+ :param midpoint: an int value [0, 255] for the midtone mapping.
+ :return: An image.
+ """
+
+ # Initial asserts
+ assert image.mode == "L"
+ if mid is None:
+ assert 0 <= blackpoint <= whitepoint <= 255
+ else:
+ assert 0 <= blackpoint <= midpoint <= whitepoint <= 255
+
+ # Define colors from arguments
+ black = _color(black, "RGB")
+ white = _color(white, "RGB")
+ if mid is not None:
+ mid = _color(mid, "RGB")
+
+ # Empty lists for the mapping
+ red = []
+ green = []
+ blue = []
+
+ # Create the low-end values
+ for i in range(0, blackpoint):
+ red.append(black[0])
+ green.append(black[1])
+ blue.append(black[2])
+
+ # Create the mapping (2-color)
+ if mid is None:
+ range_map = range(0, whitepoint - blackpoint)
+
+ for i in range_map:
+ red.append(black[0] + i * (white[0] - black[0]) // len(range_map))
+ green.append(black[1] + i * (white[1] - black[1]) // len(range_map))
+ blue.append(black[2] + i * (white[2] - black[2]) // len(range_map))
+
+ # Create the mapping (3-color)
+ else:
+ range_map1 = range(0, midpoint - blackpoint)
+ range_map2 = range(0, whitepoint - midpoint)
+
+ for i in range_map1:
+ red.append(black[0] + i * (mid[0] - black[0]) // len(range_map1))
+ green.append(black[1] + i * (mid[1] - black[1]) // len(range_map1))
+ blue.append(black[2] + i * (mid[2] - black[2]) // len(range_map1))
+ for i in range_map2:
+ red.append(mid[0] + i * (white[0] - mid[0]) // len(range_map2))
+ green.append(mid[1] + i * (white[1] - mid[1]) // len(range_map2))
+ blue.append(mid[2] + i * (white[2] - mid[2]) // len(range_map2))
+
+ # Create the high-end values
+ for i in range(0, 256 - whitepoint):
+ red.append(white[0])
+ green.append(white[1])
+ blue.append(white[2])
+
+ # Return converted image
+ image = image.convert("RGB")
+ return _lut(image, red + green + blue)
+
+
+def contain(image, size, method=Image.Resampling.BICUBIC):
+ """
+ Returns a resized version of the image, set to the maximum width and height
+ within the requested size, while maintaining the original aspect ratio.
+
+ :param image: The image to resize.
+ :param size: The requested output size in pixels, given as a
+ (width, height) tuple.
+ :param method: Resampling method to use. Default is
+ :py:attr:`~PIL.Image.Resampling.BICUBIC`.
+ See :ref:`concept-filters`.
+ :return: An image.
+ """
+
+ im_ratio = image.width / image.height
+ dest_ratio = size[0] / size[1]
+
+ if im_ratio != dest_ratio:
+ if im_ratio > dest_ratio:
+ new_height = round(image.height / image.width * size[0])
+ if new_height != size[1]:
+ size = (size[0], new_height)
+ else:
+ new_width = round(image.width / image.height * size[1])
+ if new_width != size[0]:
+ size = (new_width, size[1])
+ return image.resize(size, resample=method)
+
+
+def cover(image, size, method=Image.Resampling.BICUBIC):
+ """
+ Returns a resized version of the image, so that the requested size is
+ covered, while maintaining the original aspect ratio.
+
+ :param image: The image to resize.
+ :param size: The requested output size in pixels, given as a
+ (width, height) tuple.
+ :param method: Resampling method to use. Default is
+ :py:attr:`~PIL.Image.Resampling.BICUBIC`.
+ See :ref:`concept-filters`.
+ :return: An image.
+ """
+
+ im_ratio = image.width / image.height
+ dest_ratio = size[0] / size[1]
+
+ if im_ratio != dest_ratio:
+ if im_ratio < dest_ratio:
+ new_height = round(image.height / image.width * size[0])
+ if new_height != size[1]:
+ size = (size[0], new_height)
+ else:
+ new_width = round(image.width / image.height * size[1])
+ if new_width != size[0]:
+ size = (new_width, size[1])
+ return image.resize(size, resample=method)
+
+
+def pad(image, size, method=Image.Resampling.BICUBIC, color=None, centering=(0.5, 0.5)):
+ """
+ Returns a resized and padded version of the image, expanded to fill the
+ requested aspect ratio and size.
+
+ :param image: The image to resize and crop.
+ :param size: The requested output size in pixels, given as a
+ (width, height) tuple.
+ :param method: Resampling method to use. Default is
+ :py:attr:`~PIL.Image.Resampling.BICUBIC`.
+ See :ref:`concept-filters`.
+ :param color: The background color of the padded image.
+ :param centering: Control the position of the original image within the
+ padded version.
+
+ (0.5, 0.5) will keep the image centered.
+ (0, 0) will keep the image aligned to the top left.
+ (1, 1) will keep the image aligned to the bottom right.
+ :return: An image.
+ """
+
+ resized = contain(image, size, method)
+ if resized.size == size:
+ out = resized
+ else:
+ out = Image.new(image.mode, size, color)
+ if resized.palette:
+ out.putpalette(resized.getpalette())
+ if resized.width != size[0]:
+ x = round((size[0] - resized.width) * max(0, min(centering[0], 1)))
+ out.paste(resized, (x, 0))
+ else:
+ y = round((size[1] - resized.height) * max(0, min(centering[1], 1)))
+ out.paste(resized, (0, y))
+ return out
+
+
+def crop(image, border=0):
+ """
+ Remove border from image. The same number of pixels is removed
+ from all four sides. This function works on all image modes.
+
+ .. seealso:: :py:meth:`~PIL.Image.Image.crop`
+
+ :param image: The image to crop.
+ :param border: The number of pixels to remove.
+ :return: An image.
+ """
+ left, top, right, bottom = _border(border)
+ return image.crop((left, top, image.size[0] - right, image.size[1] - bottom))
+
+
+def scale(image, factor, resample=Image.Resampling.BICUBIC):
+ """
+ Returns a copy of the image rescaled by the given factor. A factor
+ greater than 1 expands the image; a factor between 0 and 1 contracts
+ it.
+
+ :param image: The image to rescale.
+ :param factor: The expansion factor, as a float.
+ :param resample: Resampling method to use. Default is
+ :py:attr:`~PIL.Image.Resampling.BICUBIC`.
+ See :ref:`concept-filters`.
+ :returns: An :py:class:`~PIL.Image.Image` object.
+ """
+ if factor == 1:
+ return image.copy()
+ elif factor <= 0:
+ msg = "the factor must be greater than 0"
+ raise ValueError(msg)
+ else:
+ size = (round(factor * image.width), round(factor * image.height))
+ return image.resize(size, resample)
+
+
+def deform(image, deformer, resample=Image.Resampling.BILINEAR):
+ """
+ Deform the image.
+
+ :param image: The image to deform.
+ :param deformer: A deformer object. Any object that implements a
+ ``getmesh`` method can be used.
+ :param resample: An optional resampling filter. Same values possible as
+ in the PIL.Image.transform function.
+ :return: An image.
+ """
+ return image.transform(
+ image.size, Image.Transform.MESH, deformer.getmesh(image), resample
+ )
+
+
+def equalize(image, mask=None):
+ """
+ Equalize the image histogram. This function applies a non-linear
+ mapping to the input image, in order to create a uniform
+ distribution of grayscale values in the output image.
+
+ :param image: The image to equalize.
+ :param mask: An optional mask. If given, only the pixels selected by
+ the mask are included in the analysis.
+ :return: An image.
+ """
+ if image.mode == "P":
+ image = image.convert("RGB")
+ h = image.histogram(mask)
+ lut = []
+ for b in range(0, len(h), 256):
+ histo = [_f for _f in h[b : b + 256] if _f]
+ if len(histo) <= 1:
+ lut.extend(list(range(256)))
+ else:
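+ # spread the cumulative histogram across the full 0..255 range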
+ step = (functools.reduce(operator.add, histo) - histo[-1]) // 255
+ if not step:
+ lut.extend(list(range(256)))
+ else:
+ n = step // 2
+ for i in range(256):
+ lut.append(n // step)
+ n = n + h[i + b]
+ return _lut(image, lut)
+
+
+def expand(image, border=0, fill=0):
+ """
+ Add border to the image
+
+ :param image: The image to expand.
+ :param border: Border width, in pixels.
+ :param fill: Pixel fill value (a color value). Default is 0 (black).
+ :return: An image.
+ """
+ left, top, right, bottom = _border(border)
+ width = left + image.size[0] + right
+ height = top + image.size[1] + bottom
+ color = _color(fill, image.mode)
+ if image.palette:
+ palette = ImagePalette.ImagePalette(palette=image.getpalette())
+ if isinstance(color, tuple):
+ color = palette.getcolor(color)
+ else:
+ palette = None
+ out = Image.new(image.mode, (width, height), color)
+ if palette:
+ out.putpalette(palette.palette)
+ out.paste(image, (left, top))
+ return out
+
+
+def fit(image, size, method=Image.Resampling.BICUBIC, bleed=0.0, centering=(0.5, 0.5)):
+ """
+ Returns a resized and cropped version of the image, cropped to the
+ requested aspect ratio and size.
+
+ This function was contributed by Kevin Cazabon.
+
+ :param image: The image to resize and crop.
+ :param size: The requested output size in pixels, given as a
+ (width, height) tuple.
+ :param method: Resampling method to use. Default is
+ :py:attr:`~PIL.Image.Resampling.BICUBIC`.
+ See :ref:`concept-filters`.
+ :param bleed: Remove a border around the outside of the image from all
+ four edges. The value is a decimal percentage (use 0.01 for
+ one percent). The default value is 0 (no border).
+ Cannot be greater than or equal to 0.5.
+ :param centering: Control the cropping position. Use (0.5, 0.5) for
+ center cropping (e.g. if cropping the width, take 50% off
+ of the left side, and therefore 50% off the right side).
+ (0.0, 0.0) will crop from the top left corner (i.e. if
+ cropping the width, take all of the crop off of the right
+ side, and if cropping the height, take all of it off the
+ bottom). (1.0, 0.0) will crop from the bottom left
+ corner, etc. (i.e. if cropping the width, take all of the
+ crop off the left side, and if cropping the height take
+ none from the top, and therefore all off the bottom).
+ :return: An image.
+ """
+
+ # by Kevin Cazabon, Feb 17/2000
+ # kevin@cazabon.com
+ # https://www.cazabon.com
+
+ # ensure centering is mutable
+ centering = list(centering)
+
+ if not 0.0 <= centering[0] <= 1.0:
+ centering[0] = 0.5
+ if not 0.0 <= centering[1] <= 1.0:
+ centering[1] = 0.5
+
+ if not 0.0 <= bleed < 0.5:
+ bleed = 0.0
+
+ # calculate the area to use for resizing and cropping, subtracting
+ # the 'bleed' around the edges
+
+ # number of pixels to trim off on Top and Bottom, Left and Right
+ bleed_pixels = (bleed * image.size[0], bleed * image.size[1])
+
+ live_size = (
+ image.size[0] - bleed_pixels[0] * 2,
+ image.size[1] - bleed_pixels[1] * 2,
+ )
+
+ # calculate the aspect ratio of the live_size
+ live_size_ratio = live_size[0] / live_size[1]
+
+ # calculate the aspect ratio of the output image
+ output_ratio = size[0] / size[1]
+
+ # figure out if the sides or top/bottom will be cropped off
+ if live_size_ratio == output_ratio:
+ # live_size is already the needed ratio
+ crop_width = live_size[0]
+ crop_height = live_size[1]
+ elif live_size_ratio >= output_ratio:
+ # live_size is wider than what's needed, crop the sides
+ crop_width = output_ratio * live_size[1]
+ crop_height = live_size[1]
+ else:
+ # live_size is taller than what's needed, crop the top and bottom
+ crop_width = live_size[0]
+ crop_height = live_size[0] / output_ratio
+
+ # make the crop
+ crop_left = bleed_pixels[0] + (live_size[0] - crop_width) * centering[0]
+ crop_top = bleed_pixels[1] + (live_size[1] - crop_height) * centering[1]
+
+ crop = (crop_left, crop_top, crop_left + crop_width, crop_top + crop_height)
+
+ # resize the image and return it
+ return image.resize(size, method, box=crop)
+
+
+def flip(image):
+ """
+ Flip the image vertically (top to bottom).
+
+ :param image: The image to flip.
+ :return: An image.
+ """
+ return image.transpose(Image.Transpose.FLIP_TOP_BOTTOM)
+
+
+def grayscale(image):
+ """
+ Convert the image to grayscale.
+
+ :param image: The image to convert.
+ :return: An image.
+ """
+ return image.convert("L")
+
+
+def invert(image):
+ """
+ Invert (negate) the image.
+
+ :param image: The image to invert.
+ :return: An image.
+ """
+ lut = list(range(255, -1, -1))
+ return image.point(lut) if image.mode == "1" else _lut(image, lut)
+
+
+def mirror(image):
+ """
+ Flip image horizontally (left to right).
+
+ :param image: The image to mirror.
+ :return: An image.
+ """
+ return image.transpose(Image.Transpose.FLIP_LEFT_RIGHT)
+
+
+def posterize(image, bits):
+ """
+ Reduce the number of bits for each color channel.
+
+ :param image: The image to posterize.
+ :param bits: The number of bits to keep for each channel (1-8).
+ :return: An image.
+ """
+ mask = ~(2 ** (8 - bits) - 1)
+ lut = [i & mask for i in range(256)]
+ return _lut(image, lut)
+
+
+def solarize(image, threshold=128):
+ """
+ Invert all pixel values above a threshold.
+
+ :param image: The image to solarize.
+ :param threshold: All pixels above this grayscale level are inverted.
+ :return: An image.
+ """
+ lut = []
+ for i in range(256):
+ if i < threshold:
+ lut.append(i)
+ else:
+ lut.append(255 - i)
+ return _lut(image, lut)
+
+
+def exif_transpose(image, *, in_place=False):
+ """
+ If an image has an EXIF Orientation tag, other than 1, transpose the image
+ accordingly, and remove the orientation data.
+
+ :param image: The image to transpose.
+ :param in_place: Boolean. Keyword-only argument.
+ If ``True``, the original image is modified in-place, and ``None`` is returned.
+ If ``False`` (default), a new :py:class:`~PIL.Image.Image` object is returned
+ with the transposition applied. If there is no transposition, a copy of the
+ image will be returned.
+ """
+ image.load()
+ image_exif = image.getexif()
+ orientation = image_exif.get(ExifTags.Base.Orientation)
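+ # EXIF orientation values 2-8 describe flips and rotations; 1 is upright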
+ method = {
+ 2: Image.Transpose.FLIP_LEFT_RIGHT,
+ 3: Image.Transpose.ROTATE_180,
+ 4: Image.Transpose.FLIP_TOP_BOTTOM,
+ 5: Image.Transpose.TRANSPOSE,
+ 6: Image.Transpose.ROTATE_270,
+ 7: Image.Transpose.TRANSVERSE,
+ 8: Image.Transpose.ROTATE_90,
+ }.get(orientation)
+ if method is not None:
+ transposed_image = image.transpose(method)
+ if in_place:
+ image.im = transposed_image.im
+ image.pyaccess = None
+ image._size = transposed_image._size
+ exif_image = image if in_place else transposed_image
+
+ exif = exif_image.getexif()
+ if ExifTags.Base.Orientation in exif:
+ del exif[ExifTags.Base.Orientation]
+ if "exif" in exif_image.info:
+ exif_image.info["exif"] = exif.tobytes()
+ elif "Raw profile type exif" in exif_image.info:
+ exif_image.info["Raw profile type exif"] = exif.tobytes().hex()
+ elif "XML:com.adobe.xmp" in exif_image.info:
+ for pattern in (
+ r'tiff:Orientation="([0-9])"',
+ r"([0-9])",
+ ):
+ exif_image.info["XML:com.adobe.xmp"] = re.sub(
+ pattern, "", exif_image.info["XML:com.adobe.xmp"]
+ )
+ if not in_place:
+ return transposed_image
+ elif not in_place:
+ return image.copy()
diff --git a/Lib/site-packages/PIL/ImagePalette.py b/Lib/site-packages/PIL/ImagePalette.py
new file mode 100644
index 0000000..fbcfa30
--- /dev/null
+++ b/Lib/site-packages/PIL/ImagePalette.py
@@ -0,0 +1,262 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# image palette object
+#
+# History:
+# 1996-03-11 fl Rewritten.
+# 1997-01-03 fl Up and running.
+# 1997-08-23 fl Added load hack
+# 2001-04-16 fl Fixed randint shadow bug in random()
+#
+# Copyright (c) 1997-2001 by Secret Labs AB
+# Copyright (c) 1996-1997 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import array
+
+from . import GimpGradientFile, GimpPaletteFile, ImageColor, PaletteFile
+
+
+class ImagePalette:
+ """
+ Color palette for palette mapped images
+
+ :param mode: The mode to use for the palette. See:
+ :ref:`concept-modes`. Defaults to "RGB"
+ :param palette: An optional palette. If given, it must be a bytearray,
+ an array or a list of ints between 0-255. The list must consist of
+ all channels for one color followed by the next color (e.g. RGBRGBRGB).
+ Defaults to an empty palette.
+ """
+
+ def __init__(self, mode="RGB", palette=None):
+ self.mode = mode
+ self.rawmode = None # if set, palette contains raw data
+ self.palette = palette or bytearray()
+ self.dirty = None
+
+ @property
+ def palette(self):
+ return self._palette
+
+ @palette.setter
+ def palette(self, palette):
+ self._colors = None
+ self._palette = palette
+
+ @property
+ def colors(self):
+ if self._colors is None:
+ mode_len = len(self.mode)
+ self._colors = {}
+ for i in range(0, len(self.palette), mode_len):
+ color = tuple(self.palette[i : i + mode_len])
+ if color in self._colors:
+ continue
+ self._colors[color] = i // mode_len
+ return self._colors
+
+ @colors.setter
+ def colors(self, colors):
+ self._colors = colors
+
+ def copy(self):
+ new = ImagePalette()
+
+ new.mode = self.mode
+ new.rawmode = self.rawmode
+ if self.palette is not None:
+ new.palette = self.palette[:]
+ new.dirty = self.dirty
+
+ return new
+
+ def getdata(self):
+ """
+ Get palette contents in format suitable for the low-level
+ ``im.putpalette`` primitive.
+
+ .. warning:: This method is experimental.
+ """
+ if self.rawmode:
+ return self.rawmode, self.palette
+ return self.mode, self.tobytes()
+
+ def tobytes(self):
+ """Convert palette to bytes.
+
+ .. warning:: This method is experimental.
+ """
+ if self.rawmode:
+ msg = "palette contains raw palette data"
+ raise ValueError(msg)
+ if isinstance(self.palette, bytes):
+ return self.palette
+ arr = array.array("B", self.palette)
+ return arr.tobytes()
+
+ # Declare tostring as an alias for tobytes
+ tostring = tobytes
+
+ def _new_color_index(self, image=None, e=None):
+ if not isinstance(self.palette, bytearray):
+ self._palette = bytearray(self.palette)
+ index = len(self.palette) // 3
+ special_colors = ()
+ if image:
+ special_colors = (
+ image.info.get("background"),
+ image.info.get("transparency"),
+ )
+ while index in special_colors:
+ index += 1
+ if index >= 256:
+ if image:
+ # Search for an unused index
+ for i, count in reversed(list(enumerate(image.histogram()))):
+ if count == 0 and i not in special_colors:
+ index = i
+ break
+ if index >= 256:
+ msg = "cannot allocate more than 256 colors"
+ raise ValueError(msg) from e
+ return index
+
+ def getcolor(self, color, image=None):
+ """Given an rgb tuple, allocate palette entry.
+
+ .. warning:: This method is experimental.
+ """
+ if self.rawmode:
+ msg = "palette contains raw palette data"
+ raise ValueError(msg)
+ if isinstance(color, tuple):
+ if self.mode == "RGB":
+ if len(color) == 4:
+ if color[3] != 255:
+ msg = "cannot add non-opaque RGBA color to RGB palette"
+ raise ValueError(msg)
+ color = color[:3]
+ elif self.mode == "RGBA":
+ if len(color) == 3:
+ color += (255,)
+ try:
+ return self.colors[color]
+ except KeyError as e:
+ # allocate new color slot
+ index = self._new_color_index(image, e)
+ self.colors[color] = index
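+ # overwrite the slot in place if the palette already covers this
+ # index, otherwise append the new color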
+ if index * 3 < len(self.palette):
+ self._palette = (
+ self.palette[: index * 3]
+ + bytes(color)
+ + self.palette[index * 3 + 3 :]
+ )
+ else:
+ self._palette += bytes(color)
+ self.dirty = 1
+ return index
+ else:
+ msg = f"unknown color specifier: {repr(color)}"
+ raise ValueError(msg)
+
+ def save(self, fp):
+ """Save palette to text file.
+
+ .. warning:: This method is experimental.
+ """
+ if self.rawmode:
+ msg = "palette contains raw palette data"
+ raise ValueError(msg)
+ if isinstance(fp, str):
+ fp = open(fp, "w")
+ fp.write("# Palette\n")
+ fp.write(f"# Mode: {self.mode}\n")
+ for i in range(256):
+ fp.write(f"{i}")
+ for j in range(i * len(self.mode), (i + 1) * len(self.mode)):
+ try:
+ fp.write(f" {self.palette[j]}")
+ except IndexError:
+ fp.write(" 0")
+ fp.write("\n")
+ fp.close()
+
+
+# --------------------------------------------------------------------
+# Internal
+
+
+def raw(rawmode, data):
+ palette = ImagePalette()
+ palette.rawmode = rawmode
+ palette.palette = data
+ palette.dirty = 1
+ return palette
+
+
+# --------------------------------------------------------------------
+# Factories
+
+
+def make_linear_lut(black, white):
+ if black == 0:
+ return [white * i // 255 for i in range(256)]
+
+ msg = "unavailable when black is non-zero"
+ raise NotImplementedError(msg) # FIXME
+
+
+def make_gamma_lut(exp):
+ return [int(((i / 255.0) ** exp) * 255.0 + 0.5) for i in range(256)]
+
+
+def negative(mode="RGB"):
+ palette = list(range(256 * len(mode)))
+ palette.reverse()
+ return ImagePalette(mode, [i // len(mode) for i in palette])
+
+
+def random(mode="RGB"):
+ from random import randint
+
+ palette = [randint(0, 255) for _ in range(256 * len(mode))]
+ return ImagePalette(mode, palette)
+
+
+def sepia(white="#fff0c0"):
+ bands = [make_linear_lut(0, band) for band in ImageColor.getrgb(white)]
+ return ImagePalette("RGB", [bands[i % 3][i // 3] for i in range(256 * 3)])
+
+
+def wedge(mode="RGB"):
+ palette = list(range(256 * len(mode)))
+ return ImagePalette(mode, [i // len(mode) for i in palette])
+
+
+def load(filename):
+ # FIXME: supports GIMP gradients only
+
+ with open(filename, "rb") as fp:
+ for paletteHandler in [
+ GimpPaletteFile.GimpPaletteFile,
+ GimpGradientFile.GimpGradientFile,
+ PaletteFile.PaletteFile,
+ ]:
+ try:
+ fp.seek(0)
+ lut = paletteHandler(fp).getpalette()
+ if lut:
+ break
+ except (SyntaxError, ValueError):
+ pass
+ else:
+ msg = "cannot load palette"
+ raise OSError(msg)
+
+ return lut # data, rawmode
diff --git a/Lib/site-packages/PIL/ImagePath.py b/Lib/site-packages/PIL/ImagePath.py
new file mode 100644
index 0000000..77e8a60
--- /dev/null
+++ b/Lib/site-packages/PIL/ImagePath.py
@@ -0,0 +1,20 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# path interface
+#
+# History:
+# 1996-11-04 fl Created
+# 2002-04-14 fl Added documentation stub class
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from . import Image
+
+Path = Image.core.path
diff --git a/Lib/site-packages/PIL/ImageQt.py b/Lib/site-packages/PIL/ImageQt.py
new file mode 100644
index 0000000..6377c75
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageQt.py
@@ -0,0 +1,197 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# a simple Qt image interface.
+#
+# history:
+# 2006-06-03 fl: created
+# 2006-06-04 fl: inherit from QImage instead of wrapping it
+# 2006-06-05 fl: removed toimage helper; move string support to ImageQt
+# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com)
+#
+# Copyright (c) 2006 by Secret Labs AB
+# Copyright (c) 2006 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import sys
+from io import BytesIO
+
+from . import Image
+from ._util import is_path
+
+qt_versions = [
+ ["6", "PyQt6"],
+ ["side6", "PySide6"],
+]
+
+# If a version has already been imported, attempt it first
+qt_versions.sort(key=lambda qt_version: qt_version[1] in sys.modules, reverse=True)
+for qt_version, qt_module in qt_versions:
+ try:
+ if qt_module == "PyQt6":
+ from PyQt6.QtCore import QBuffer, QIODevice
+ from PyQt6.QtGui import QImage, QPixmap, qRgba
+ elif qt_module == "PySide6":
+ from PySide6.QtCore import QBuffer, QIODevice
+ from PySide6.QtGui import QImage, QPixmap, qRgba
+ except (ImportError, RuntimeError):
+ continue
+ qt_is_installed = True
+ break
+else:
+ qt_is_installed = False
+ qt_version = None
+
+
+def rgb(r, g, b, a=255):
+ """(Internal) Turns an RGB color into a Qt compatible color integer."""
+ # use qRgb to pack the colors, and then turn the resulting long
+ # into a negative integer with the same bitpattern.
+ return qRgba(r, g, b, a) & 0xFFFFFFFF
+
+
+def fromqimage(im):
+ """
+ :param im: QImage or PIL ImageQt object
+ """
+ buffer = QBuffer()
+ if qt_version == "6":
+ try:
+ qt_openmode = QIODevice.OpenModeFlag
+ except AttributeError:
+ qt_openmode = QIODevice.OpenMode
+ else:
+ qt_openmode = QIODevice
+ buffer.open(qt_openmode.ReadWrite)
+ # preserve alpha channel with png
+ # otherwise ppm is more friendly with Image.open
+ if im.hasAlphaChannel():
+ im.save(buffer, "png")
+ else:
+ im.save(buffer, "ppm")
+
+ b = BytesIO()
+ b.write(buffer.data())
+ buffer.close()
+ b.seek(0)
+
+ return Image.open(b)
+
+
+def fromqpixmap(im):
+ return fromqimage(im)
+
+
+def align8to32(bytes, width, mode):
+ """
+ Pads each scanline of data so that it is 32-bit aligned.
+ """
+
+ bits_per_pixel = {"1": 1, "L": 8, "P": 8, "I;16": 16}[mode]
+
+ # calculate bytes per line and the extra padding if needed
+ bits_per_line = bits_per_pixel * width
+ full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8)
+ bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0)
+
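+ # -n % 4 is the number of padding bytes needed to reach a multiple of 4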
+ extra_padding = -bytes_per_line % 4
+
+ # already 32 bit aligned by luck
+ if not extra_padding:
+ return bytes
+
+ new_data = [
+ bytes[i * bytes_per_line : (i + 1) * bytes_per_line] + b"\x00" * extra_padding
+ for i in range(len(bytes) // bytes_per_line)
+ ]
+
+ return b"".join(new_data)
+
+
+def _toqclass_helper(im):
+ data = None
+ colortable = None
+ exclusive_fp = False
+
+ # handle filename, if given instead of image name
+ if hasattr(im, "toUtf8"):
+ # FIXME - is this really the best way to do this?
+ im = str(im.toUtf8(), "utf-8")
+ if is_path(im):
+ im = Image.open(im)
+ exclusive_fp = True
+
+ qt_format = QImage.Format if qt_version == "6" else QImage
+ if im.mode == "1":
+ format = qt_format.Format_Mono
+ elif im.mode == "L":
+ format = qt_format.Format_Indexed8
+ colortable = [rgb(i, i, i) for i in range(256)]
+ elif im.mode == "P":
+ format = qt_format.Format_Indexed8
+ palette = im.getpalette()
+ colortable = [rgb(*palette[i : i + 3]) for i in range(0, len(palette), 3)]
+ elif im.mode == "RGB":
+ # Populate the 4th channel with 255
+ im = im.convert("RGBA")
+
+ data = im.tobytes("raw", "BGRA")
+ format = qt_format.Format_RGB32
+ elif im.mode == "RGBA":
+ data = im.tobytes("raw", "BGRA")
+ format = qt_format.Format_ARGB32
+ elif im.mode == "I;16" and hasattr(qt_format, "Format_Grayscale16"): # Qt 5.13+
+ im = im.point(lambda i: i * 256)
+
+ format = qt_format.Format_Grayscale16
+ else:
+ if exclusive_fp:
+ im.close()
+ msg = f"unsupported image mode {repr(im.mode)}"
+ raise ValueError(msg)
+
+ size = im.size
+ __data = data or align8to32(im.tobytes(), size[0], im.mode)
+ if exclusive_fp:
+ im.close()
+ return {"data": __data, "size": size, "format": format, "colortable": colortable}
+
+
+if qt_is_installed:
+
+ class ImageQt(QImage):
+ def __init__(self, im):
+ """
+            A PIL image wrapper for Qt. This is a subclass of Qt's QImage
+            class, usable with either PyQt6 or PySide6.
+
+            :param im: A PIL Image object, or a file name (given either as a
+                Python string or a PyQt string object).
+ """
+ im_data = _toqclass_helper(im)
+ # must keep a reference, or Qt will crash!
+ # All QImage constructors that take data operate on an existing
+ # buffer, so this buffer has to hang on for the life of the image.
+ # Fixes https://github.com/python-pillow/Pillow/issues/1370
+ self.__data = im_data["data"]
+ super().__init__(
+ self.__data,
+ im_data["size"][0],
+ im_data["size"][1],
+ im_data["format"],
+ )
+ if im_data["colortable"]:
+ self.setColorTable(im_data["colortable"])
+
+
+def toqimage(im):
+ return ImageQt(im)
+
+
+def toqpixmap(im):
+ qimage = toqimage(im)
+ return QPixmap.fromImage(qimage)
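+
+# Round-trip sketch (requires PyQt6 or PySide6; "hopper.png" is a
+# placeholder path):
+#
+#     from PIL import Image, ImageQt
+#     im = Image.open("hopper.png")
+#     qim = ImageQt.ImageQt(im)       # PIL image -> QImage subclass
+#     back = ImageQt.fromqimage(qim)  # QImage -> PIL image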
diff --git a/Lib/site-packages/PIL/ImageSequence.py b/Lib/site-packages/PIL/ImageSequence.py
new file mode 100644
index 0000000..2c18502
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageSequence.py
@@ -0,0 +1,86 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# sequence support classes
+#
+# history:
+# 1997-02-20 fl Created
+#
+# Copyright (c) 1997 by Secret Labs AB.
+# Copyright (c) 1997 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+from __future__ import annotations
+
+from typing import Callable
+
+from . import Image
+
+
+class Iterator:
+ """
+ This class implements an iterator object that can be used to loop
+ over an image sequence.
+
+ You can use the ``[]`` operator to access elements by index. This operator
+ will raise an :py:exc:`IndexError` if you try to access a nonexistent
+ frame.
+
+ :param im: An image object.
+ """
+
+ def __init__(self, im: Image.Image):
+ if not hasattr(im, "seek"):
+ msg = "im must have seek method"
+ raise AttributeError(msg)
+ self.im = im
+ self.position = getattr(self.im, "_min_frame", 0)
+
+ def __getitem__(self, ix: int) -> Image.Image:
+ try:
+ self.im.seek(ix)
+ return self.im
+ except EOFError as e:
+ msg = "end of sequence"
+ raise IndexError(msg) from e
+
+ def __iter__(self) -> Iterator:
+ return self
+
+ def __next__(self) -> Image.Image:
+ try:
+ self.im.seek(self.position)
+ self.position += 1
+ return self.im
+ except EOFError as e:
+ msg = "end of sequence"
+ raise StopIteration(msg) from e
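+
+# Typical use, looping over the frames of a multi-frame file (sketch;
+# "animation.gif" is a placeholder path):
+#
+#     from PIL import Image, ImageSequence
+#     with Image.open("animation.gif") as im:
+#         for frame in ImageSequence.Iterator(im):
+#             print(frame.size)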
+
+
+def all_frames(
+ im: Image.Image | list[Image.Image],
+ func: Callable[[Image.Image], Image.Image] | None = None,
+) -> list[Image.Image]:
+ """
+ Applies a given function to all frames in an image or a list of images.
+ The frames are returned as a list of separate images.
+
+ :param im: An image, or a list of images.
+ :param func: The function to apply to all of the image frames.
+ :returns: A list of images.
+ """
+ if not isinstance(im, list):
+ im = [im]
+
+ ims = []
+ for imSequence in im:
+ current = imSequence.tell()
+
+ ims += [im_frame.copy() for im_frame in Iterator(imSequence)]
+
+ imSequence.seek(current)
+ return [func(im) for im in ims] if func else ims
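+
+
+# For example, to mirror every frame in one call (sketch, using the public
+# ImageOps helper):
+#
+#     from PIL import ImageOps, ImageSequence
+#     frames = ImageSequence.all_frames(im, ImageOps.mirror)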
diff --git a/Lib/site-packages/PIL/ImageShow.py b/Lib/site-packages/PIL/ImageShow.py
new file mode 100644
index 0000000..fad3e09
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageShow.py
@@ -0,0 +1,326 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# im.show() drivers
+#
+# History:
+# 2008-04-06 fl Created
+#
+# Copyright (c) Secret Labs AB 2008.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import os
+import shutil
+import subprocess
+import sys
+from shlex import quote
+
+from . import Image
+
+_viewers = []
+
+
+def register(viewer, order=1):
+ """
+ The :py:func:`register` function is used to register additional viewers::
+
+ from PIL import ImageShow
+ ImageShow.register(MyViewer()) # MyViewer will be used as a last resort
+ ImageShow.register(MySecondViewer(), 0) # MySecondViewer will be prioritised
+ ImageShow.register(ImageShow.XVViewer(), 0) # XVViewer will be prioritised
+
+ :param viewer: The viewer to be registered.
+ :param order:
+ Zero or a negative integer to prepend this viewer to the list,
+ a positive integer to append it.
+ """
+ try:
+ if issubclass(viewer, Viewer):
+ viewer = viewer()
+ except TypeError:
+ pass # raised if viewer wasn't a class
+ if order > 0:
+ _viewers.append(viewer)
+ else:
+ _viewers.insert(0, viewer)
+
+
+def show(image, title=None, **options):
+ r"""
+ Display a given image.
+
+ :param image: An image object.
+ :param title: Optional title. Not all viewers can display the title.
+ :param \**options: Additional viewer options.
+ :returns: ``True`` if a suitable viewer was found, ``False`` otherwise.
+ """
+ for viewer in _viewers:
+ if viewer.show(image, title=title, **options):
+ return True
+ return False
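+
+# Image.Image.show() routes through this function; it can also be called
+# directly to use any viewers registered below (sketch):
+#
+#     from PIL import ImageShow
+#     ImageShow.show(im, title="preview")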
+
+
+class Viewer:
+ """Base class for viewers."""
+
+ # main api
+
+ def show(self, image, **options):
+ """
+ The main function for displaying an image.
+ Converts the given image to the target format and displays it.
+ """
+
+ if not (
+ image.mode in ("1", "RGBA")
+ or (self.format == "PNG" and image.mode in ("I;16", "LA"))
+ ):
+ base = Image.getmodebase(image.mode)
+ if image.mode != base:
+ image = image.convert(base)
+
+ return self.show_image(image, **options)
+
+ # hook methods
+
+ format = None
+ """The format to convert the image into."""
+ options = {}
+ """Additional options used to convert the image."""
+
+ def get_format(self, image):
+ """Return format name, or ``None`` to save as PGM/PPM."""
+ return self.format
+
+ def get_command(self, file, **options):
+ """
+ Returns the command used to display the file.
+ Not implemented in the base class.
+ """
+ msg = "unavailable in base viewer"
+ raise NotImplementedError(msg)
+
+ def save_image(self, image):
+ """Save to temporary file and return filename."""
+ return image._dump(format=self.get_format(image), **self.options)
+
+ def show_image(self, image, **options):
+ """Display the given image."""
+ return self.show_file(self.save_image(image), **options)
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ os.system(self.get_command(path, **options)) # nosec
+ return 1
+
+
+# --------------------------------------------------------------------
+
+
+class WindowsViewer(Viewer):
+ """The default viewer on Windows is the default system application for PNG files."""
+
+ format = "PNG"
+ options = {"compress_level": 1, "save_all": True}
+
+ def get_command(self, file, **options):
+ return (
+ f'start "Pillow" /WAIT "{file}" '
+ "&& ping -n 4 127.0.0.1 >NUL "
+ f'&& del /f "{file}"'
+ )
+
+
+if sys.platform == "win32":
+ register(WindowsViewer)
+
+
+class MacViewer(Viewer):
+ """The default viewer on macOS using ``Preview.app``."""
+
+ format = "PNG"
+ options = {"compress_level": 1, "save_all": True}
+
+ def get_command(self, file, **options):
+ # on darwin open returns immediately resulting in the temp
+ # file removal while app is opening
+ command = "open -a Preview.app"
+ command = f"({command} {quote(file)}; sleep 20; rm -f {quote(file)})&"
+ return command
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ subprocess.call(["open", "-a", "Preview.app", path])
+ executable = sys.executable or shutil.which("python3")
+ if executable:
+ subprocess.Popen(
+ [
+ executable,
+ "-c",
+ "import os, sys, time; time.sleep(20); os.remove(sys.argv[1])",
+ path,
+ ]
+ )
+ return 1
+
+
+if sys.platform == "darwin":
+ register(MacViewer)
+
+
+class UnixViewer(Viewer):
+ format = "PNG"
+ options = {"compress_level": 1, "save_all": True}
+
+ def get_command(self, file, **options):
+ command = self.get_command_ex(file, **options)[0]
+ return f"({command} {quote(file)}"
+
+
+class XDGViewer(UnixViewer):
+ """
+ The freedesktop.org ``xdg-open`` command.
+ """
+
+ def get_command_ex(self, file, **options):
+ command = executable = "xdg-open"
+ return command, executable
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ subprocess.Popen(["xdg-open", path])
+ return 1
+
+
+class DisplayViewer(UnixViewer):
+ """
+ The ImageMagick ``display`` command.
+ This viewer supports the ``title`` parameter.
+ """
+
+ def get_command_ex(self, file, title=None, **options):
+ command = executable = "display"
+ if title:
+ command += f" -title {quote(title)}"
+ return command, executable
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ args = ["display"]
+ title = options.get("title")
+ if title:
+ args += ["-title", title]
+ args.append(path)
+
+ subprocess.Popen(args)
+ return 1
+
+
+class GmDisplayViewer(UnixViewer):
+ """The GraphicsMagick ``gm display`` command."""
+
+ def get_command_ex(self, file, **options):
+ executable = "gm"
+ command = "gm display"
+ return command, executable
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ subprocess.Popen(["gm", "display", path])
+ return 1
+
+
+class EogViewer(UnixViewer):
+ """The GNOME Image Viewer ``eog`` command."""
+
+ def get_command_ex(self, file, **options):
+ executable = "eog"
+ command = "eog -n"
+ return command, executable
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ subprocess.Popen(["eog", "-n", path])
+ return 1
+
+
+class XVViewer(UnixViewer):
+ """
+ The X Viewer ``xv`` command.
+ This viewer supports the ``title`` parameter.
+ """
+
+ def get_command_ex(self, file, title=None, **options):
+ # note: xv is pretty outdated. most modern systems have
+ # imagemagick's display command instead.
+ command = executable = "xv"
+ if title:
+ command += f" -name {quote(title)}"
+ return command, executable
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ args = ["xv"]
+ title = options.get("title")
+ if title:
+ args += ["-name", title]
+ args.append(path)
+
+ subprocess.Popen(args)
+ return 1
+
+
+if sys.platform not in ("win32", "darwin"): # unixoids
+ if shutil.which("xdg-open"):
+ register(XDGViewer)
+ if shutil.which("display"):
+ register(DisplayViewer)
+ if shutil.which("gm"):
+ register(GmDisplayViewer)
+ if shutil.which("eog"):
+ register(EogViewer)
+ if shutil.which("xv"):
+ register(XVViewer)
+
+
+class IPythonViewer(Viewer):
+ """The viewer for IPython frontends."""
+
+ def show_image(self, image, **options):
+ ipython_display(image)
+ return 1
+
+
+try:
+ from IPython.display import display as ipython_display
+except ImportError:
+ pass
+else:
+ register(IPythonViewer)
+
+
+if __name__ == "__main__":
+ if len(sys.argv) < 2:
+ print("Syntax: python3 ImageShow.py imagefile [title]")
+ sys.exit()
+
+ with Image.open(sys.argv[1]) as im:
+ print(show(im, *sys.argv[2:]))
diff --git a/Lib/site-packages/PIL/ImageStat.py b/Lib/site-packages/PIL/ImageStat.py
new file mode 100644
index 0000000..13864e5
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageStat.py
@@ -0,0 +1,129 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# global image statistics
+#
+# History:
+# 1996-04-05 fl Created
+# 1997-05-21 fl Added mask; added rms, var, stddev attributes
+# 1997-08-05 fl Added median
+# 1998-07-05 hk Fixed integer overflow error
+#
+# Notes:
+# This class shows how to implement delayed evaluation of attributes.
+# To get a certain value, simply access the corresponding attribute.
+# The __getattr__ dispatcher takes care of the rest.
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996-97.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import math
+
+
+class Stat:
+ def __init__(self, image_or_list, mask=None):
+ try:
+ if mask:
+ self.h = image_or_list.histogram(mask)
+ else:
+ self.h = image_or_list.histogram()
+ except AttributeError:
+ self.h = image_or_list # assume it to be a histogram list
+ if not isinstance(self.h, list):
+ msg = "first argument must be image or list"
+ raise TypeError(msg)
+ self.bands = list(range(len(self.h) // 256))
+
+ def __getattr__(self, id):
+ """Calculate missing attribute"""
+ if id[:4] == "_get":
+ raise AttributeError(id)
+ # calculate missing attribute
+ v = getattr(self, "_get" + id)()
+ setattr(self, id, v)
+ return v
+
+ def _getextrema(self):
+ """Get min/max values for each band in the image"""
+
+ def minmax(histogram):
+ res_min, res_max = 255, 0
+ for i in range(256):
+ if histogram[i]:
+ res_min = i
+ break
+ for i in range(255, -1, -1):
+ if histogram[i]:
+ res_max = i
+ break
+ return res_min, res_max
+
+ return [minmax(self.h[i:]) for i in range(0, len(self.h), 256)]
+
+ def _getcount(self):
+ """Get total number of pixels in each layer"""
+ return [sum(self.h[i : i + 256]) for i in range(0, len(self.h), 256)]
+
+ def _getsum(self):
+ """Get sum of all pixels in each layer"""
+
+ v = []
+ for i in range(0, len(self.h), 256):
+ layer_sum = 0.0
+ for j in range(256):
+ layer_sum += j * self.h[i + j]
+ v.append(layer_sum)
+ return v
+
+ def _getsum2(self):
+ """Get squared sum of all pixels in each layer"""
+
+ v = []
+ for i in range(0, len(self.h), 256):
+ sum2 = 0.0
+ for j in range(256):
+ sum2 += (j**2) * float(self.h[i + j])
+ v.append(sum2)
+ return v
+
+ def _getmean(self):
+ """Get average pixel level for each layer"""
+ return [self.sum[i] / self.count[i] for i in self.bands]
+
+ def _getmedian(self):
+ """Get median pixel level for each layer"""
+
+ v = []
+ for i in self.bands:
+ s = 0
+ half = self.count[i] // 2
+ b = i * 256
+ for j in range(256):
+ s = s + self.h[b + j]
+ if s > half:
+ break
+ v.append(j)
+ return v
+
+ def _getrms(self):
+ """Get RMS for each layer"""
+ return [math.sqrt(self.sum2[i] / self.count[i]) for i in self.bands]
+
+ def _getvar(self):
+ """Get variance for each layer"""
+ return [
+ (self.sum2[i] - (self.sum[i] ** 2.0) / self.count[i]) / self.count[i]
+ for i in self.bands
+ ]
+
+ def _getstddev(self):
+ """Get standard deviation for each layer"""
+ return [math.sqrt(self.var[i]) for i in self.bands]
+
+
+Global = Stat # compatibility
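+
+
+# Attribute access drives the lazy calculation above (sketch):
+#
+#     from PIL import Image, ImageStat
+#     stat = ImageStat.Stat(im)          # or ImageStat.Stat(im, mask)
+#     print(stat.mean, stat.stddev)      # per-band lists, computed on demand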
diff --git a/Lib/site-packages/PIL/ImageTk.py b/Lib/site-packages/PIL/ImageTk.py
new file mode 100644
index 0000000..10b2cc6
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageTk.py
@@ -0,0 +1,284 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# a Tk display interface
+#
+# History:
+# 96-04-08 fl Created
+# 96-09-06 fl Added getimage method
+# 96-11-01 fl Rewritten, removed image attribute and crop method
+# 97-05-09 fl Use PyImagingPaste method instead of image type
+# 97-05-12 fl Minor tweaks to match the IFUNC95 interface
+# 97-05-17 fl Support the "pilbitmap" booster patch
+# 97-06-05 fl Added file= and data= argument to image constructors
+# 98-03-09 fl Added width and height methods to Image classes
+# 98-07-02 fl Use default mode for "P" images without palette attribute
+# 98-07-02 fl Explicitly destroy Tkinter image objects
+# 99-07-24 fl Support multiple Tk interpreters (from Greg Couch)
+# 99-07-26 fl Automatically hook into Tkinter (if possible)
+# 99-08-15 fl Hook uses _imagingtk instead of _imaging
+#
+# Copyright (c) 1997-1999 by Secret Labs AB
+# Copyright (c) 1996-1997 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import tkinter
+from io import BytesIO
+
+from . import Image
+
+# --------------------------------------------------------------------
+# Check for Tkinter interface hooks
+
+_pilbitmap_ok = None
+
+
+def _pilbitmap_check():
+ global _pilbitmap_ok
+ if _pilbitmap_ok is None:
+ try:
+ im = Image.new("1", (1, 1))
+ tkinter.BitmapImage(data=f"PIL:{im.im.id}")
+ _pilbitmap_ok = 1
+ except tkinter.TclError:
+ _pilbitmap_ok = 0
+ return _pilbitmap_ok
+
+
+def _get_image_from_kw(kw):
+ source = None
+ if "file" in kw:
+ source = kw.pop("file")
+ elif "data" in kw:
+ source = BytesIO(kw.pop("data"))
+ if source:
+ return Image.open(source)
+
+
+def _pyimagingtkcall(command, photo, id):
+ tk = photo.tk
+ try:
+ tk.call(command, photo, id)
+ except tkinter.TclError:
+ # activate Tkinter hook
+ # may raise an error if it cannot attach to Tkinter
+ from . import _imagingtk
+
+ _imagingtk.tkinit(tk.interpaddr())
+ tk.call(command, photo, id)
+
+
+# --------------------------------------------------------------------
+# PhotoImage
+
+
+class PhotoImage:
+ """
+ A Tkinter-compatible photo image. This can be used
+ everywhere Tkinter expects an image object. If the image is an RGBA
+ image, pixels having alpha 0 are treated as transparent.
+
+ The constructor takes either a PIL image, or a mode and a size.
+ Alternatively, you can use the ``file`` or ``data`` options to initialize
+ the photo image object.
+
+ :param image: Either a PIL image, or a mode string. If a mode string is
+ used, a size must also be given.
+ :param size: If the first argument is a mode string, this defines the size
+ of the image.
+ :keyword file: A filename to load the image from (using
+ ``Image.open(file)``).
+ :keyword data: An 8-bit string containing image data (as loaded from an
+ image file).
+ """
+
+ def __init__(self, image=None, size=None, **kw):
+ # Tk compatibility: file or data
+ if image is None:
+ image = _get_image_from_kw(kw)
+
+ if hasattr(image, "mode") and hasattr(image, "size"):
+ # got an image instead of a mode
+ mode = image.mode
+ if mode == "P":
+ # palette mapped data
+ image.apply_transparency()
+ image.load()
+ try:
+ mode = image.palette.mode
+ except AttributeError:
+ mode = "RGB" # default
+ size = image.size
+ kw["width"], kw["height"] = size
+ else:
+ mode = image
+ image = None
+
+ if mode not in ["1", "L", "RGB", "RGBA"]:
+ mode = Image.getmodebase(mode)
+
+ self.__mode = mode
+ self.__size = size
+ self.__photo = tkinter.PhotoImage(**kw)
+ self.tk = self.__photo.tk
+ if image:
+ self.paste(image)
+
+ def __del__(self):
+ name = self.__photo.name
+ self.__photo.name = None
+ try:
+ self.__photo.tk.call("image", "delete", name)
+ except Exception:
+ pass # ignore internal errors
+
+ def __str__(self):
+ """
+ Get the Tkinter photo image identifier. This method is automatically
+ called by Tkinter whenever a PhotoImage object is passed to a Tkinter
+ method.
+
+ :return: A Tkinter photo image identifier (a string).
+ """
+ return str(self.__photo)
+
+ def width(self):
+ """
+ Get the width of the image.
+
+ :return: The width, in pixels.
+ """
+ return self.__size[0]
+
+ def height(self):
+ """
+ Get the height of the image.
+
+ :return: The height, in pixels.
+ """
+ return self.__size[1]
+
+ def paste(self, im):
+ """
+ Paste a PIL image into the photo image. Note that this can
+ be very slow if the photo image is displayed.
+
+ :param im: A PIL image. The size must match the target region. If the
+ mode does not match, the image is converted to the mode of
+                   the photo image.
+ """
+ # convert to blittable
+ im.load()
+ image = im.im
+ if image.isblock() and im.mode == self.__mode:
+ block = image
+ else:
+ block = image.new_block(self.__mode, im.size)
+ image.convert2(block, image) # convert directly between buffers
+
+ _pyimagingtkcall("PyImagingPhoto", self.__photo, block.id)
+
+
+# --------------------------------------------------------------------
+# BitmapImage
+
+
+class BitmapImage:
+ """
+ A Tkinter-compatible bitmap image. This can be used everywhere Tkinter
+ expects an image object.
+
+ The given image must have mode "1". Pixels having value 0 are treated as
+ transparent. Options, if any, are passed on to Tkinter. The most commonly
+ used option is ``foreground``, which is used to specify the color for the
+ non-transparent parts. See the Tkinter documentation for information on
+ how to specify colours.
+
+ :param image: A PIL image.
+ """
+
+ def __init__(self, image=None, **kw):
+ # Tk compatibility: file or data
+ if image is None:
+ image = _get_image_from_kw(kw)
+
+ self.__mode = image.mode
+ self.__size = image.size
+
+ if _pilbitmap_check():
+ # fast way (requires the pilbitmap booster patch)
+ image.load()
+ kw["data"] = f"PIL:{image.im.id}"
+ self.__im = image # must keep a reference
+ else:
+ # slow but safe way
+ kw["data"] = image.tobitmap()
+ self.__photo = tkinter.BitmapImage(**kw)
+
+ def __del__(self):
+ name = self.__photo.name
+ self.__photo.name = None
+ try:
+ self.__photo.tk.call("image", "delete", name)
+ except Exception:
+ pass # ignore internal errors
+
+ def width(self):
+ """
+ Get the width of the image.
+
+ :return: The width, in pixels.
+ """
+ return self.__size[0]
+
+ def height(self):
+ """
+ Get the height of the image.
+
+ :return: The height, in pixels.
+ """
+ return self.__size[1]
+
+ def __str__(self):
+ """
+ Get the Tkinter bitmap image identifier. This method is automatically
+ called by Tkinter whenever a BitmapImage object is passed to a Tkinter
+ method.
+
+ :return: A Tkinter bitmap image identifier (a string).
+ """
+ return str(self.__photo)
+
+
+def getimage(photo):
+ """Copies the contents of a PhotoImage to a PIL image memory."""
+ im = Image.new("RGBA", (photo.width(), photo.height()))
+ block = im.im
+
+ _pyimagingtkcall("PyImagingPhotoGet", photo, block.id)
+
+ return im
+
+
+def _show(image, title):
+ """Helper for the Image.show method."""
+
+ class UI(tkinter.Label):
+ def __init__(self, master, im):
+ if im.mode == "1":
+ self.image = BitmapImage(im, foreground="white", master=master)
+ else:
+ self.image = PhotoImage(im, master=master)
+ super().__init__(master, image=self.image, bg="black", bd=0)
+
+ if not tkinter._default_root:
+ msg = "tkinter not initialized"
+ raise OSError(msg)
+ top = tkinter.Toplevel()
+ if title:
+ top.title(title)
+ UI(top, image).pack()
diff --git a/Lib/site-packages/PIL/ImageTransform.py b/Lib/site-packages/PIL/ImageTransform.py
new file mode 100644
index 0000000..84c81f1
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageTransform.py
@@ -0,0 +1,112 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# transform wrappers
+#
+# History:
+# 2002-04-08 fl Created
+#
+# Copyright (c) 2002 by Secret Labs AB
+# Copyright (c) 2002 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from typing import Sequence
+
+from . import Image
+
+
+class Transform(Image.ImageTransformHandler):
+ method: Image.Transform
+
+ def __init__(self, data: Sequence[int]) -> None:
+ self.data = data
+
+ def getdata(self) -> tuple[int, Sequence[int]]:
+ return self.method, self.data
+
+ def transform(
+ self,
+ size: tuple[int, int],
+ image: Image.Image,
+ **options: dict[str, str | int | tuple[int, ...] | list[int]],
+ ) -> Image.Image:
+ # can be overridden
+ method, data = self.getdata()
+ return image.transform(size, method, data, **options)
+
+
+class AffineTransform(Transform):
+ """
+ Define an affine image transform.
+
+    This function takes a 6-tuple (a, b, c, d, e, f) which contains the first
+ two rows from an affine transform matrix. For each pixel (x, y) in the
+ output image, the new value is taken from a position (a x + b y + c,
+ d x + e y + f) in the input image, rounded to nearest pixel.
+
+ This function can be used to scale, translate, rotate, and shear the
+ original image.
+
+ See :py:meth:`~PIL.Image.Image.transform`
+
+ :param matrix: A 6-tuple (a, b, c, d, e, f) containing the first two rows
+ from an affine transform matrix.
+ """
+
+ method = Image.Transform.AFFINE
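+
+# For example, sampling the input at (x + dx, y + dy) for each output pixel
+# (x, y) -- i.e. a translation -- uses the rows (1, 0, dx, 0, 1, dy)
+# (sketch; dx and dy are placeholders):
+#
+#     from PIL import ImageTransform
+#     moved = im.transform(
+#         im.size, ImageTransform.AffineTransform((1, 0, dx, 0, 1, dy))
+#     )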
+
+
+class ExtentTransform(Transform):
+ """
+ Define a transform to extract a subregion from an image.
+
+ Maps a rectangle (defined by two corners) from the image to a rectangle of
+ the given size. The resulting image will contain data sampled from between
+ the corners, such that (x0, y0) in the input image will end up at (0,0) in
+ the output image, and (x1, y1) at size.
+
+ This method can be used to crop, stretch, shrink, or mirror an arbitrary
+ rectangle in the current image. It is slightly slower than crop, but about
+ as fast as a corresponding resize operation.
+
+ See :py:meth:`~PIL.Image.Image.transform`
+
+ :param bbox: A 4-tuple (x0, y0, x1, y1) which specifies two points in the
+ input image's coordinate system. See :ref:`coordinate-system`.
+ """
+
+ method = Image.Transform.EXTENT
+
+
+class QuadTransform(Transform):
+ """
+ Define a quad image transform.
+
+ Maps a quadrilateral (a region defined by four corners) from the image to a
+ rectangle of the given size.
+
+ See :py:meth:`~PIL.Image.Image.transform`
+
+    :param xy: An 8-tuple (x0, y0, x1, y1, x2, y2, x3, y3) which contains the
+ upper left, lower left, lower right, and upper right corner of the
+ source quadrilateral.
+ """
+
+ method = Image.Transform.QUAD
+
+
+class MeshTransform(Transform):
+ """
+ Define a mesh image transform. A mesh transform consists of one or more
+ individual quad transforms.
+
+ See :py:meth:`~PIL.Image.Image.transform`
+
+ :param data: A list of (bbox, quad) tuples.
+ """
+
+ method = Image.Transform.MESH
diff --git a/Lib/site-packages/PIL/ImageWin.py b/Lib/site-packages/PIL/ImageWin.py
new file mode 100644
index 0000000..75910d2
--- /dev/null
+++ b/Lib/site-packages/PIL/ImageWin.py
@@ -0,0 +1,231 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# a Windows DIB display interface
+#
+# History:
+# 1996-05-20 fl Created
+# 1996-09-20 fl Fixed subregion exposure
+# 1997-09-21 fl Added draw primitive (for tzPrint)
+# 2003-05-21 fl Added experimental Window/ImageWindow classes
+# 2003-09-05 fl Added fromstring/tostring methods
+#
+# Copyright (c) Secret Labs AB 1997-2003.
+# Copyright (c) Fredrik Lundh 1996-2003.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from . import Image
+
+
+class HDC:
+ """
+ Wraps an HDC integer. The resulting object can be passed to the
+ :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose`
+ methods.
+ """
+
+ def __init__(self, dc):
+ self.dc = dc
+
+ def __int__(self):
+ return self.dc
+
+
+class HWND:
+ """
+ Wraps an HWND integer. The resulting object can be passed to the
+ :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose`
+ methods, instead of a DC.
+ """
+
+ def __init__(self, wnd):
+ self.wnd = wnd
+
+ def __int__(self):
+ return self.wnd
+
+
+class Dib:
+ """
+ A Windows bitmap with the given mode and size. The mode can be one of "1",
+ "L", "P", or "RGB".
+
+ If the display requires a palette, this constructor creates a suitable
+ palette and associates it with the image. For an "L" image, 128 graylevels
+ are allocated. For an "RGB" image, a 6x6x6 colour cube is used, together
+ with 20 graylevels.
+
+ To make sure that palettes work properly under Windows, you must call the
+ ``palette`` method upon certain events from Windows.
+
+ :param image: Either a PIL image, or a mode string. If a mode string is
+ used, a size must also be given. The mode can be one of "1",
+ "L", "P", or "RGB".
+ :param size: If the first argument is a mode string, this
+ defines the size of the image.
+ """
+
+ def __init__(self, image, size=None):
+ if hasattr(image, "mode") and hasattr(image, "size"):
+ mode = image.mode
+ size = image.size
+ else:
+ mode = image
+ image = None
+ if mode not in ["1", "L", "P", "RGB"]:
+ mode = Image.getmodebase(mode)
+ self.image = Image.core.display(mode, size)
+ self.mode = mode
+ self.size = size
+ if image:
+ self.paste(image)
+
+ def expose(self, handle):
+ """
+ Copy the bitmap contents to a device context.
+
+ :param handle: Device context (HDC), cast to a Python integer, or an
+ HDC or HWND instance. In PythonWin, you can use
+ ``CDC.GetHandleAttrib()`` to get a suitable handle.
+ """
+ if isinstance(handle, HWND):
+ dc = self.image.getdc(handle)
+ try:
+ result = self.image.expose(dc)
+ finally:
+ self.image.releasedc(handle, dc)
+ else:
+ result = self.image.expose(handle)
+ return result
+
+ def draw(self, handle, dst, src=None):
+ """
+ Same as expose, but allows you to specify where to draw the image, and
+ what part of it to draw.
+
+ The destination and source areas are given as 4-tuple rectangles. If
+ the source is omitted, the entire image is copied. If the source and
+ the destination have different sizes, the image is resized as
+ necessary.
+ """
+ if not src:
+ src = (0, 0) + self.size
+ if isinstance(handle, HWND):
+ dc = self.image.getdc(handle)
+ try:
+ result = self.image.draw(dc, dst, src)
+ finally:
+ self.image.releasedc(handle, dc)
+ else:
+ result = self.image.draw(handle, dst, src)
+ return result
+
+ def query_palette(self, handle):
+ """
+ Installs the palette associated with the image in the given device
+ context.
+
+ This method should be called upon **QUERYNEWPALETTE** and
+ **PALETTECHANGED** events from Windows. If this method returns a
+ non-zero value, one or more display palette entries were changed, and
+ the image should be redrawn.
+
+ :param handle: Device context (HDC), cast to a Python integer, or an
+ HDC or HWND instance.
+ :return: A true value if one or more entries were changed (this
+ indicates that the image should be redrawn).
+ """
+ if isinstance(handle, HWND):
+ handle = self.image.getdc(handle)
+ try:
+ result = self.image.query_palette(handle)
+ finally:
+ self.image.releasedc(handle, handle)
+ else:
+ result = self.image.query_palette(handle)
+ return result
+
+ def paste(self, im, box=None):
+ """
+ Paste a PIL image into the bitmap image.
+
+ :param im: A PIL image. The size must match the target region.
+ If the mode does not match, the image is converted to the
+ mode of the bitmap image.
+ :param box: A 4-tuple defining the left, upper, right, and
+ lower pixel coordinate. See :ref:`coordinate-system`. If
+ None is given instead of a tuple, all of the image is
+ assumed.
+ """
+ im.load()
+ if self.mode != im.mode:
+ im = im.convert(self.mode)
+ if box:
+ self.image.paste(im.im, box)
+ else:
+ self.image.paste(im.im)
+
+ def frombytes(self, buffer):
+ """
+ Load display memory contents from byte data.
+
+ :param buffer: A buffer containing display data (usually
+ data returned from :py:func:`~PIL.ImageWin.Dib.tobytes`)
+ """
+ return self.image.frombytes(buffer)
+
+ def tobytes(self):
+ """
+ Copy display memory contents to bytes object.
+
+ :return: A bytes object containing display data.
+ """
+ return self.image.tobytes()
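+
+# Typical use from a Windows GUI (sketch; hdc is a device-context handle
+# supplied by the surrounding framework, e.g. CDC.GetHandleAttrib() in
+# PythonWin):
+#
+#     dib = ImageWin.Dib(im)
+#     dib.expose(ImageWin.HDC(hdc))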
+
+
+class Window:
+ """Create a Window with the given title size."""
+
+ def __init__(self, title="PIL", width=None, height=None):
+ self.hwnd = Image.core.createwindow(
+ title, self.__dispatcher, width or 0, height or 0
+ )
+
+ def __dispatcher(self, action, *args):
+ return getattr(self, "ui_handle_" + action)(*args)
+
+ def ui_handle_clear(self, dc, x0, y0, x1, y1):
+ pass
+
+ def ui_handle_damage(self, x0, y0, x1, y1):
+ pass
+
+ def ui_handle_destroy(self):
+ pass
+
+ def ui_handle_repair(self, dc, x0, y0, x1, y1):
+ pass
+
+ def ui_handle_resize(self, width, height):
+ pass
+
+ def mainloop(self):
+ Image.core.eventloop()
+
+
+class ImageWindow(Window):
+ """Create an image window which displays the given image."""
+
+ def __init__(self, image, title="PIL"):
+ if not isinstance(image, Dib):
+ image = Dib(image)
+ self.image = image
+ width, height = image.size
+ super().__init__(title, width=width, height=height)
+
+ def ui_handle_repair(self, dc, x0, y0, x1, y1):
+ self.image.draw(dc, (x0, y0, x1, y1))
diff --git a/Lib/site-packages/PIL/ImtImagePlugin.py b/Lib/site-packages/PIL/ImtImagePlugin.py
new file mode 100644
index 0000000..7469c59
--- /dev/null
+++ b/Lib/site-packages/PIL/ImtImagePlugin.py
@@ -0,0 +1,101 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# IM Tools support for PIL
+#
+# history:
+# 1996-05-27 fl Created (read 8-bit images only)
+# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.2)
+#
+# Copyright (c) Secret Labs AB 1997-2001.
+# Copyright (c) Fredrik Lundh 1996-2001.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import re
+
+from . import Image, ImageFile
+
+#
+# --------------------------------------------------------------------
+
+field = re.compile(rb"([a-z]*) ([^ \r\n]*)")
+
+
+##
+# Image plugin for IM Tools images.
+
+
+class ImtImageFile(ImageFile.ImageFile):
+ format = "IMT"
+ format_description = "IM Tools"
+
+ def _open(self):
+        # Quick rejection: if there's no LF among the first
+ # 100 bytes, this is (probably) not a text header.
+
+ buffer = self.fp.read(100)
+ if b"\n" not in buffer:
+ msg = "not an IM file"
+ raise SyntaxError(msg)
+
+ xsize = ysize = 0
+
+ while True:
+ if buffer:
+ s = buffer[:1]
+ buffer = buffer[1:]
+ else:
+ s = self.fp.read(1)
+ if not s:
+ break
+
+ if s == b"\x0C":
+ # image data begins
+ self.tile = [
+ (
+ "raw",
+ (0, 0) + self.size,
+ self.fp.tell() - len(buffer),
+ (self.mode, 0, 1),
+ )
+ ]
+
+ break
+
+ else:
+ # read key/value pair
+ if b"\n" not in buffer:
+ buffer += self.fp.read(100)
+ lines = buffer.split(b"\n")
+ s += lines.pop(0)
+ buffer = b"\n".join(lines)
+ if len(s) == 1 or len(s) > 100:
+ break
+ if s[0] == ord(b"*"):
+ continue # comment
+
+ m = field.match(s)
+ if not m:
+ break
+ k, v = m.group(1, 2)
+ if k == b"width":
+ xsize = int(v)
+ self._size = xsize, ysize
+ elif k == b"height":
+ ysize = int(v)
+ self._size = xsize, ysize
+ elif k == b"pixel" and v == b"n8":
+ self._mode = "L"
+
+
+#
+# --------------------------------------------------------------------
+
+Image.register_open(ImtImageFile.format, ImtImageFile)
+
+#
+# no extension registered (".im" is simply too common)
diff --git a/Lib/site-packages/PIL/IptcImagePlugin.py b/Lib/site-packages/PIL/IptcImagePlugin.py
new file mode 100644
index 0000000..4096094
--- /dev/null
+++ b/Lib/site-packages/PIL/IptcImagePlugin.py
@@ -0,0 +1,235 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# IPTC/NAA file handling
+#
+# history:
+# 1995-10-01 fl Created
+# 1998-03-09 fl Cleaned up and added to PIL
+# 2002-06-18 fl Added getiptcinfo helper
+#
+# Copyright (c) Secret Labs AB 1997-2002.
+# Copyright (c) Fredrik Lundh 1995.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from io import BytesIO
+from typing import Sequence
+
+from . import Image, ImageFile
+from ._binary import i16be as i16
+from ._binary import i32be as i32
+from ._deprecate import deprecate
+
+COMPRESSION = {1: "raw", 5: "jpeg"}
+
+
+def __getattr__(name: str) -> bytes:
+ if name == "PAD":
+ deprecate("IptcImagePlugin.PAD", 12)
+ return b"\0\0\0\0"
+ msg = f"module '{__name__}' has no attribute '{name}'"
+ raise AttributeError(msg)
+
+
+#
+# Helpers
+
+
+def _i(c: bytes) -> int:
+ return i32((b"\0\0\0\0" + c)[-4:])
+
+
+def _i8(c: int | bytes) -> int:
+ return c if isinstance(c, int) else c[0]
+
+
+def i(c: bytes) -> int:
+ """.. deprecated:: 10.2.0"""
+ deprecate("IptcImagePlugin.i", 12)
+ return _i(c)
+
+
+def dump(c: Sequence[int | bytes]) -> None:
+ """.. deprecated:: 10.2.0"""
+ deprecate("IptcImagePlugin.dump", 12)
+ for i in c:
+ print("%02x" % _i8(i), end=" ")
+ print()
+
+
+##
+# Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields
+# from TIFF and JPEG files, use the getiptcinfo function.
+
+
+class IptcImageFile(ImageFile.ImageFile):
+ format = "IPTC"
+ format_description = "IPTC/NAA"
+
+ def getint(self, key: tuple[int, int]) -> int:
+ return _i(self.info[key])
+
+ def field(self) -> tuple[tuple[int, int] | None, int]:
+ #
+        # get an IPTC field header
+ s = self.fp.read(5)
+ if not s.strip(b"\x00"):
+ return None, 0
+
+ tag = s[1], s[2]
+
+ # syntax
+ if s[0] != 0x1C or tag[0] not in [1, 2, 3, 4, 5, 6, 7, 8, 9, 240]:
+ msg = "invalid IPTC/NAA file"
+ raise SyntaxError(msg)
+
+ # field size
+ size = s[3]
+ if size > 132:
+ msg = "illegal field length in IPTC/NAA file"
+ raise OSError(msg)
+ elif size == 128:
+ size = 0
+ elif size > 128:
+ size = _i(self.fp.read(size - 128))
+ else:
+ size = i16(s, 3)
+
+ return tag, size
+
+ def _open(self) -> None:
+ # load descriptive fields
+ while True:
+ offset = self.fp.tell()
+ tag, size = self.field()
+ if not tag or tag == (8, 10):
+ break
+ if size:
+ tagdata = self.fp.read(size)
+ else:
+ tagdata = None
+ if tag in self.info:
+ if isinstance(self.info[tag], list):
+ self.info[tag].append(tagdata)
+ else:
+ self.info[tag] = [self.info[tag], tagdata]
+ else:
+ self.info[tag] = tagdata
+
+ # mode
+ layers = self.info[(3, 60)][0]
+ component = self.info[(3, 60)][1]
+ if (3, 65) in self.info:
+ id = self.info[(3, 65)][0] - 1
+ else:
+ id = 0
+ if layers == 1 and not component:
+ self._mode = "L"
+ elif layers == 3 and component:
+ self._mode = "RGB"[id]
+ elif layers == 4 and component:
+ self._mode = "CMYK"[id]
+
+ # size
+ self._size = self.getint((3, 20)), self.getint((3, 30))
+
+ # compression
+ try:
+ compression = COMPRESSION[self.getint((3, 120))]
+ except KeyError as e:
+ msg = "Unknown IPTC image compression"
+ raise OSError(msg) from e
+
+ # tile
+ if tag == (8, 10):
+ self.tile = [("iptc", (0, 0) + self.size, offset, compression)]
+
+ def load(self):
+ if len(self.tile) != 1 or self.tile[0][0] != "iptc":
+ return ImageFile.ImageFile.load(self)
+
+ offset, compression = self.tile[0][2:]
+
+ self.fp.seek(offset)
+
+ # Copy image data to temporary file
+ o = BytesIO()
+ if compression == "raw":
+ # To simplify access to the extracted file,
+ # prepend a PPM header
+ o.write(b"P5\n%d %d\n255\n" % self.size)
+ while True:
+ type, size = self.field()
+ if type != (8, 10):
+ break
+ while size > 0:
+ s = self.fp.read(min(size, 8192))
+ if not s:
+ break
+ o.write(s)
+ size -= len(s)
+
+ with Image.open(o) as _im:
+ _im.load()
+ self.im = _im.im
+
+
+Image.register_open(IptcImageFile.format, IptcImageFile)
+
+Image.register_extension(IptcImageFile.format, ".iim")
+
+
+def getiptcinfo(im):
+ """
+ Get IPTC information from TIFF, JPEG, or IPTC file.
+
+ :param im: An image containing IPTC data.
+ :returns: A dictionary containing IPTC information, or None if
+ no IPTC information block was found.
+ """
+ from . import JpegImagePlugin, TiffImagePlugin
+
+ data = None
+
+ if isinstance(im, IptcImageFile):
+ # return info dictionary right away
+ return im.info
+
+ elif isinstance(im, JpegImagePlugin.JpegImageFile):
+ # extract the IPTC/NAA resource
+ photoshop = im.info.get("photoshop")
+ if photoshop:
+ data = photoshop.get(0x0404)
+
+ elif isinstance(im, TiffImagePlugin.TiffImageFile):
+ # get raw data from the IPTC/NAA tag (PhotoShop tags the data
+ # as 4-byte integers, so we cannot use the get method...)
+ try:
+ data = im.tag.tagdata[TiffImagePlugin.IPTC_NAA_CHUNK]
+ except (AttributeError, KeyError):
+ pass
+
+ if data is None:
+ return None # no properties
+
+ # create an IptcImagePlugin object without initializing it
+ class FakeImage:
+ pass
+
+ im = FakeImage()
+ im.__class__ = IptcImageFile
+
+ # parse the IPTC information chunk
+ im.info = {}
+ im.fp = BytesIO(data)
+
+ try:
+ im._open()
+ except (IndexError, KeyError):
+ pass # expected failure
+
+ return im.info
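+
+
+# Example (sketch; (2, 120) is the IPTC caption/abstract dataset, and
+# "photo.jpg" is a placeholder path):
+#
+#     from PIL import Image, IptcImagePlugin
+#     with Image.open("photo.jpg") as im:
+#         iptc = IptcImagePlugin.getiptcinfo(im)
+#     if iptc:
+#         print(iptc.get((2, 120)))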
diff --git a/Lib/site-packages/PIL/Jpeg2KImagePlugin.py b/Lib/site-packages/PIL/Jpeg2KImagePlugin.py
new file mode 100644
index 0000000..4b778a0
--- /dev/null
+++ b/Lib/site-packages/PIL/Jpeg2KImagePlugin.py
@@ -0,0 +1,398 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# JPEG2000 file handling
+#
+# History:
+# 2014-03-12 ajh Created
+# 2021-06-30 rogermb Extract dpi information from the 'resc' header box
+#
+# Copyright (c) 2014 Coriolis Systems Limited
+# Copyright (c) 2014 Alastair Houghton
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import io
+import os
+import struct
+
+from . import Image, ImageFile, _binary
+
+
+class BoxReader:
+ """
+ A small helper class to read fields stored in JPEG2000 header boxes
+ and to easily step into and read sub-boxes.
+ """
+
+ def __init__(self, fp, length=-1):
+ self.fp = fp
+ self.has_length = length >= 0
+ self.length = length
+ self.remaining_in_box = -1
+
+ def _can_read(self, num_bytes):
+ if self.has_length and self.fp.tell() + num_bytes > self.length:
+ # Outside box: ensure we don't read past the known file length
+ return False
+ if self.remaining_in_box >= 0:
+ # Inside box contents: ensure read does not go past box boundaries
+ return num_bytes <= self.remaining_in_box
+ else:
+ return True # No length known, just read
+
+ def _read_bytes(self, num_bytes):
+ if not self._can_read(num_bytes):
+ msg = "Not enough data in header"
+ raise SyntaxError(msg)
+
+ data = self.fp.read(num_bytes)
+ if len(data) < num_bytes:
+ msg = f"Expected to read {num_bytes} bytes but only got {len(data)}."
+ raise OSError(msg)
+
+ if self.remaining_in_box > 0:
+ self.remaining_in_box -= num_bytes
+ return data
+
+ def read_fields(self, field_format):
+ size = struct.calcsize(field_format)
+ data = self._read_bytes(size)
+ return struct.unpack(field_format, data)
+
+ def read_boxes(self):
+ size = self.remaining_in_box
+ data = self._read_bytes(size)
+ return BoxReader(io.BytesIO(data), size)
+
+ def has_next_box(self):
+ if self.has_length:
+ return self.fp.tell() + self.remaining_in_box < self.length
+ else:
+ return True
+
+ def next_box_type(self):
+ # Skip the rest of the box if it has not been read
+ if self.remaining_in_box > 0:
+ self.fp.seek(self.remaining_in_box, os.SEEK_CUR)
+ self.remaining_in_box = -1
+
+ # Read the length and type of the next box
+ lbox, tbox = self.read_fields(">I4s")
+ if lbox == 1:
+ lbox = self.read_fields(">Q")[0]
+ hlen = 16
+ else:
+ hlen = 8
+
+ if lbox < hlen or not self._can_read(lbox - hlen):
+ msg = "Invalid header length"
+ raise SyntaxError(msg)
+
+ self.remaining_in_box = lbox - hlen
+ return tbox
+
+
+def _parse_codestream(fp):
+ """Parse the JPEG 2000 codestream to extract the size and component
+ count from the SIZ marker segment, returning a PIL (size, mode) tuple."""
+
+ hdr = fp.read(2)
+ lsiz = _binary.i16be(hdr)
+ siz = hdr + fp.read(lsiz - 2)
+ lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, _, _, _, _, csiz = struct.unpack_from(
+ ">HHIIIIIIIIH", siz
+ )
+ ssiz = [None] * csiz
+ xrsiz = [None] * csiz
+ yrsiz = [None] * csiz
+ for i in range(csiz):
+ ssiz[i], xrsiz[i], yrsiz[i] = struct.unpack_from(">BBB", siz, 36 + 3 * i)
+
+ size = (xsiz - xosiz, ysiz - yosiz)
+ if csiz == 1:
+ if (yrsiz[0] & 0x7F) > 8:
+ mode = "I;16"
+ else:
+ mode = "L"
+ elif csiz == 2:
+ mode = "LA"
+ elif csiz == 3:
+ mode = "RGB"
+ elif csiz == 4:
+ mode = "RGBA"
+ else:
+ mode = None
+
+ return size, mode
+
+
+def _res_to_dpi(num, denom, exp):
+ """Convert JPEG2000's (numerator, denominator, exponent-base-10) resolution,
+ calculated as (num / denom) * 10^exp and stored in dots per meter,
+ to floating-point dots per inch."""
+ if denom != 0:
+ return (254 * num * (10**exp)) / (10000 * denom)
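+
+# e.g. (num, denom, exp) = (2835, 1, 0) encodes 2835 dots per metre,
+# and (254 * 2835) / 10000 is roughly 72.01 dpi.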
+
+
+def _parse_jp2_header(fp):
+ """Parse the JP2 header box to extract size, component count,
+ color space information, and optionally DPI information,
+ returning a (size, mode, mimetype, dpi) tuple."""
+
+ # Find the JP2 header box
+ reader = BoxReader(fp)
+ header = None
+ mimetype = None
+ while reader.has_next_box():
+ tbox = reader.next_box_type()
+
+ if tbox == b"jp2h":
+ header = reader.read_boxes()
+ break
+ elif tbox == b"ftyp":
+ if reader.read_fields(">4s")[0] == b"jpx ":
+ mimetype = "image/jpx"
+
+ size = None
+ mode = None
+ bpc = None
+ nc = None
+ dpi = None # 2-tuple of DPI info, or None
+
+ while header.has_next_box():
+ tbox = header.next_box_type()
+
+ if tbox == b"ihdr":
+ height, width, nc, bpc = header.read_fields(">IIHB")
+ size = (width, height)
+ if nc == 1 and (bpc & 0x7F) > 8:
+ mode = "I;16"
+ elif nc == 1:
+ mode = "L"
+ elif nc == 2:
+ mode = "LA"
+ elif nc == 3:
+ mode = "RGB"
+ elif nc == 4:
+ mode = "RGBA"
+ elif tbox == b"res ":
+ res = header.read_boxes()
+ while res.has_next_box():
+ tres = res.next_box_type()
+ if tres == b"resc":
+ vrcn, vrcd, hrcn, hrcd, vrce, hrce = res.read_fields(">HHHHBB")
+ hres = _res_to_dpi(hrcn, hrcd, hrce)
+ vres = _res_to_dpi(vrcn, vrcd, vrce)
+ if hres is not None and vres is not None:
+ dpi = (hres, vres)
+ break
+
+ if size is None or mode is None:
+ msg = "Malformed JP2 header"
+ raise SyntaxError(msg)
+
+ return size, mode, mimetype, dpi
+
+
+##
+# Image plugin for JPEG2000 images.
+
+
+class Jpeg2KImageFile(ImageFile.ImageFile):
+ format = "JPEG2000"
+ format_description = "JPEG 2000 (ISO 15444)"
+
+ def _open(self):
+ sig = self.fp.read(4)
+ if sig == b"\xff\x4f\xff\x51":
+ self.codec = "j2k"
+ self._size, self._mode = _parse_codestream(self.fp)
+ else:
+ sig = sig + self.fp.read(8)
+
+ if sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a":
+ self.codec = "jp2"
+ header = _parse_jp2_header(self.fp)
+ self._size, self._mode, self.custom_mimetype, dpi = header
+ if dpi is not None:
+ self.info["dpi"] = dpi
+ if self.fp.read(12).endswith(b"jp2c\xff\x4f\xff\x51"):
+ self._parse_comment()
+ else:
+ msg = "not a JPEG 2000 file"
+ raise SyntaxError(msg)
+
+ if self.size is None or self.mode is None:
+ msg = "unable to determine size/mode"
+ raise SyntaxError(msg)
+
+ self._reduce = 0
+ self.layers = 0
+
+ fd = -1
+ length = -1
+
+ try:
+ fd = self.fp.fileno()
+ length = os.fstat(fd).st_size
+ except Exception:
+ fd = -1
+ try:
+ pos = self.fp.tell()
+ self.fp.seek(0, io.SEEK_END)
+ length = self.fp.tell()
+ self.fp.seek(pos)
+ except Exception:
+ length = -1
+
+ self.tile = [
+ (
+ "jpeg2k",
+ (0, 0) + self.size,
+ 0,
+ (self.codec, self._reduce, self.layers, fd, length),
+ )
+ ]
+
+ def _parse_comment(self):
+ hdr = self.fp.read(2)
+ length = _binary.i16be(hdr)
+ self.fp.seek(length - 2, os.SEEK_CUR)
+
+ while True:
+ marker = self.fp.read(2)
+ if not marker:
+ break
+ typ = marker[1]
+ if typ in (0x90, 0xD9):
+ # Start of tile or end of codestream
+ break
+ hdr = self.fp.read(2)
+ length = _binary.i16be(hdr)
+ if typ == 0x64:
+ # Comment
+ self.info["comment"] = self.fp.read(length - 2)[2:]
+ break
+ else:
+ self.fp.seek(length - 2, os.SEEK_CUR)
+
+ @property
+ def reduce(self):
+ # https://github.com/python-pillow/Pillow/issues/4343 found that the
+ # new Image 'reduce' method was shadowed by this plugin's 'reduce'
+ # property. This attempts to allow for both scenarios
+ return self._reduce or super().reduce
+
+ @reduce.setter
+ def reduce(self, value):
+ self._reduce = value
+
+ def load(self):
+ if self.tile and self._reduce:
+ power = 1 << self._reduce
+ adjust = power >> 1
+ self._size = (
+ int((self.size[0] + adjust) / power),
+ int((self.size[1] + adjust) / power),
+ )
+
+ # Update the reduce and layers settings
+ t = self.tile[0]
+ t3 = (t[3][0], self._reduce, self.layers, t[3][3], t[3][4])
+ self.tile = [(t[0], (0, 0) + self.size, t[2], t3)]
+
+ return ImageFile.ImageFile.load(self)
+
+
+def _accept(prefix):
+ return (
+ prefix[:4] == b"\xff\x4f\xff\x51"
+ or prefix[:12] == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a"
+ )
+
+
+# ------------------------------------------------------------
+# Save support
+
+
+def _save(im, fp, filename):
+ # Get the keyword arguments
+ info = im.encoderinfo
+
+ if filename.endswith(".j2k") or info.get("no_jp2", False):
+ kind = "j2k"
+ else:
+ kind = "jp2"
+
+ offset = info.get("offset", None)
+ tile_offset = info.get("tile_offset", None)
+ tile_size = info.get("tile_size", None)
+ quality_mode = info.get("quality_mode", "rates")
+ quality_layers = info.get("quality_layers", None)
+ if quality_layers is not None and not (
+ isinstance(quality_layers, (list, tuple))
+ and all(
+ isinstance(quality_layer, (int, float)) for quality_layer in quality_layers
+ )
+ ):
+ msg = "quality_layers must be a sequence of numbers"
+ raise ValueError(msg)
+
+ num_resolutions = info.get("num_resolutions", 0)
+ cblk_size = info.get("codeblock_size", None)
+ precinct_size = info.get("precinct_size", None)
+ irreversible = info.get("irreversible", False)
+ progression = info.get("progression", "LRCP")
+ cinema_mode = info.get("cinema_mode", "no")
+ mct = info.get("mct", 0)
+ signed = info.get("signed", False)
+ comment = info.get("comment")
+ if isinstance(comment, str):
+ comment = comment.encode()
+ plt = info.get("plt", False)
+
+ fd = -1
+ if hasattr(fp, "fileno"):
+ try:
+ fd = fp.fileno()
+ except Exception:
+ fd = -1
+
+ im.encoderconfig = (
+ offset,
+ tile_offset,
+ tile_size,
+ quality_mode,
+ quality_layers,
+ num_resolutions,
+ cblk_size,
+ precinct_size,
+ irreversible,
+ progression,
+ cinema_mode,
+ mct,
+ signed,
+ fd,
+ comment,
+ plt,
+ )
+
+ ImageFile._save(im, fp, [("jpeg2k", (0, 0) + im.size, 0, kind)])
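+
+# The options above arrive via Image.save keyword arguments (sketch):
+#
+#     im.save("out.jp2", quality_mode="dB", quality_layers=[38], irreversible=True)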
+
+
+# ------------------------------------------------------------
+# Registry stuff
+
+
+Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept)
+Image.register_save(Jpeg2KImageFile.format, _save)
+
+Image.register_extensions(
+ Jpeg2KImageFile.format, [".jp2", ".j2k", ".jpc", ".jpf", ".jpx", ".j2c"]
+)
+
+Image.register_mime(Jpeg2KImageFile.format, "image/jp2")
diff --git a/Lib/site-packages/PIL/JpegImagePlugin.py b/Lib/site-packages/PIL/JpegImagePlugin.py
new file mode 100644
index 0000000..81b8749
--- /dev/null
+++ b/Lib/site-packages/PIL/JpegImagePlugin.py
@@ -0,0 +1,868 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# JPEG (JFIF) file handling
+#
+# See "Digital Compression and Coding of Continuous-Tone Still Images,
+# Part 1, Requirements and Guidelines" (CCITT T.81 / ISO 10918-1)
+#
+# History:
+# 1995-09-09 fl Created
+# 1995-09-13 fl Added full parser
+# 1996-03-25 fl Added hack to use the IJG command line utilities
+# 1996-05-05 fl Workaround Photoshop 2.5 CMYK polarity bug
+# 1996-05-28 fl Added draft support, JFIF version (0.1)
+# 1996-12-30 fl Added encoder options, added progression property (0.2)
+# 1997-08-27 fl Save mode 1 images as BW (0.3)
+# 1998-07-12 fl Added YCbCr to draft and save methods (0.4)
+# 1998-10-19 fl Don't hang on files using 16-bit DQT's (0.4.1)
+# 2001-04-16 fl Extract DPI settings from JFIF files (0.4.2)
+# 2002-07-01 fl Skip pad bytes before markers; identify Exif files (0.4.3)
+# 2003-04-25 fl Added experimental EXIF decoder (0.5)
+# 2003-06-06 fl Added experimental EXIF GPSinfo decoder
+# 2003-09-13 fl Extract COM markers
+# 2009-09-06 fl Added icc_profile support (from Florian Hoech)
+# 2009-03-06 fl Changed CMYK handling; always use Adobe polarity (0.6)
+# 2009-03-08 fl Added subsampling support (from Justin Huff).
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1995-1996 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import array
+import io
+import math
+import os
+import struct
+import subprocess
+import sys
+import tempfile
+import warnings
+
+from . import Image, ImageFile
+from ._binary import i16be as i16
+from ._binary import i32be as i32
+from ._binary import o8
+from ._binary import o16be as o16
+from .JpegPresets import presets
+
+#
+# Parser
+
+
+def Skip(self, marker):
+ n = i16(self.fp.read(2)) - 2
+ ImageFile._safe_read(self.fp, n)
+
+
+def APP(self, marker):
+ #
+ # Application marker. Store these in the APP dictionary.
+ # Also look for well-known application markers.
+
+ n = i16(self.fp.read(2)) - 2
+ s = ImageFile._safe_read(self.fp, n)
+
+ app = "APP%d" % (marker & 15)
+
+ self.app[app] = s # compatibility
+ self.applist.append((app, s))
+
+ if marker == 0xFFE0 and s[:4] == b"JFIF":
+ # extract JFIF information
+ self.info["jfif"] = version = i16(s, 5) # version
+ self.info["jfif_version"] = divmod(version, 256)
+ # extract JFIF properties
+ try:
+ jfif_unit = s[7]
+ jfif_density = i16(s, 8), i16(s, 10)
+ except Exception:
+ pass
+ else:
+ if jfif_unit == 1:
+ self.info["dpi"] = jfif_density
+ self.info["jfif_unit"] = jfif_unit
+ self.info["jfif_density"] = jfif_density
+ elif marker == 0xFFE1 and s[:6] == b"Exif\0\0":
+ # extract EXIF information
+ if "exif" in self.info:
+ self.info["exif"] += s[6:]
+ else:
+ self.info["exif"] = s
+ self._exif_offset = self.fp.tell() - n + 6
+ elif marker == 0xFFE2 and s[:5] == b"FPXR\0":
+ # extract FlashPix information (incomplete)
+ self.info["flashpix"] = s # FIXME: value will change
+ elif marker == 0xFFE2 and s[:12] == b"ICC_PROFILE\0":
+ # Since an ICC profile can be larger than the maximum size of
+ # a JPEG marker (64K), we need provisions to split it into
+ # multiple markers. The format defined by the ICC specifies
+ # one or more APP2 markers containing the following data:
+ # Identifying string ASCII "ICC_PROFILE\0" (12 bytes)
+ # Marker sequence number 1, 2, etc (1 byte)
+ # Number of markers Total of APP2's used (1 byte)
+ # Profile data (remainder of APP2 data)
+ # Decoders should use the marker sequence numbers to
+ # reassemble the profile, rather than assuming that the APP2
+ # markers appear in the correct sequence.
+ self.icclist.append(s)
+ elif marker == 0xFFED and s[:14] == b"Photoshop 3.0\x00":
+ # parse the image resource block
+ offset = 14
+ photoshop = self.info.setdefault("photoshop", {})
+ while s[offset : offset + 4] == b"8BIM":
+ try:
+ offset += 4
+ # resource code
+ code = i16(s, offset)
+ offset += 2
+ # resource name (usually empty)
+ name_len = s[offset]
+ # name = s[offset+1:offset+1+name_len]
+ offset += 1 + name_len
+ offset += offset & 1 # align
+ # resource data block
+ size = i32(s, offset)
+ offset += 4
+ data = s[offset : offset + size]
+ if code == 0x03ED: # ResolutionInfo
+ data = {
+ "XResolution": i32(data, 0) / 65536,
+ "DisplayedUnitsX": i16(data, 4),
+ "YResolution": i32(data, 8) / 65536,
+ "DisplayedUnitsY": i16(data, 12),
+ }
+ photoshop[code] = data
+ offset += size
+ offset += offset & 1 # align
+ except struct.error:
+ break # insufficient data
+
+ elif marker == 0xFFEE and s[:5] == b"Adobe":
+ self.info["adobe"] = i16(s, 5)
+ # extract Adobe custom properties
+ try:
+ adobe_transform = s[11]
+ except IndexError:
+ pass
+ else:
+ self.info["adobe_transform"] = adobe_transform
+ elif marker == 0xFFE2 and s[:4] == b"MPF\0":
+ # extract MPO information
+ self.info["mp"] = s[4:]
+ # offset is current location minus buffer size
+ # plus constant header size
+ self.info["mpoffset"] = self.fp.tell() - n + 4
+
+ # If DPI isn't in JPEG header, fetch from EXIF
+ if "dpi" not in self.info and "exif" in self.info:
+ try:
+ exif = self.getexif()
+ resolution_unit = exif[0x0128]
+ x_resolution = exif[0x011A]
+ try:
+ dpi = float(x_resolution[0]) / x_resolution[1]
+ except TypeError:
+ dpi = x_resolution
+ if math.isnan(dpi):
+ msg = "DPI is not a number"
+ raise ValueError(msg)
+ if resolution_unit == 3: # cm
+ # 1 dpcm = 2.54 dpi
+ dpi *= 2.54
+ self.info["dpi"] = dpi, dpi
+ except (
+ struct.error,
+ KeyError,
+ SyntaxError,
+ TypeError,
+ ValueError,
+ ZeroDivisionError,
+ ):
+ # struct.error for truncated EXIF
+ # KeyError for dpi not included
+ # SyntaxError for invalid/unreadable EXIF
+ # ValueError or TypeError for dpi being an invalid float
+ # ZeroDivisionError for invalid dpi rational value
+ self.info["dpi"] = 72, 72
+
+
+def COM(self, marker):
+ #
+ # Comment marker. Store these in the APP dictionary.
+ n = i16(self.fp.read(2)) - 2
+ s = ImageFile._safe_read(self.fp, n)
+
+ self.info["comment"] = s
+ self.app["COM"] = s # compatibility
+ self.applist.append(("COM", s))
+
+
+def SOF(self, marker):
+ #
+ # Start of frame marker. Defines the size and mode of the
+ # image. JPEG is colour blind, so we use some simple
+ # heuristics to map the number of layers to an appropriate
+ # mode. Note that this could be made a bit brighter, by
+ # looking for JFIF and Adobe APP markers.
+
+ n = i16(self.fp.read(2)) - 2
+ s = ImageFile._safe_read(self.fp, n)
+ self._size = i16(s, 3), i16(s, 1)
+
+ self.bits = s[0]
+ if self.bits != 8:
+ msg = f"cannot handle {self.bits}-bit layers"
+ raise SyntaxError(msg)
+
+ self.layers = s[5]
+ if self.layers == 1:
+ self._mode = "L"
+ elif self.layers == 3:
+ self._mode = "RGB"
+ elif self.layers == 4:
+ self._mode = "CMYK"
+ else:
+ msg = f"cannot handle {self.layers}-layer images"
+ raise SyntaxError(msg)
+
+ if marker in [0xFFC2, 0xFFC6, 0xFFCA, 0xFFCE]:
+ self.info["progressive"] = self.info["progression"] = 1
+
+ if self.icclist:
+ # fixup icc profile
+ self.icclist.sort() # sort by sequence number
+ if self.icclist[0][13] == len(self.icclist):
+ profile = [p[14:] for p in self.icclist]
+ icc_profile = b"".join(profile)
+ else:
+ icc_profile = None # wrong number of fragments
+ self.info["icc_profile"] = icc_profile
+ self.icclist = []
+
+ for i in range(6, len(s), 3):
+ t = s[i : i + 3]
+ # 4-tuples: id, vsamp, hsamp, qtable
+ self.layer.append((t[0], t[1] // 16, t[1] & 15, t[2]))
+
+
+def DQT(self, marker):
+ #
+ # Define quantization table. Note that there might be more
+ # than one table in each marker.
+
+ # FIXME: The quantization tables can be used to estimate the
+ # compression quality.
+
+ n = i16(self.fp.read(2)) - 2
+ s = ImageFile._safe_read(self.fp, n)
+ while len(s):
+ v = s[0]
+ precision = 1 if (v // 16 == 0) else 2 # in bytes
+ qt_length = 1 + precision * 64
+ if len(s) < qt_length:
+ msg = "bad quantization table marker"
+ raise SyntaxError(msg)
+ data = array.array("B" if precision == 1 else "H", s[1:qt_length])
+ if sys.byteorder == "little" and precision > 1:
+ data.byteswap() # the values are always big-endian
+ self.quantization[v & 15] = [data[i] for i in zigzag_index]
+ s = s[qt_length:]
+
+
+#
+# JPEG marker table
+
+MARKER = {
+ 0xFFC0: ("SOF0", "Baseline DCT", SOF),
+ 0xFFC1: ("SOF1", "Extended Sequential DCT", SOF),
+ 0xFFC2: ("SOF2", "Progressive DCT", SOF),
+ 0xFFC3: ("SOF3", "Spatial lossless", SOF),
+ 0xFFC4: ("DHT", "Define Huffman table", Skip),
+ 0xFFC5: ("SOF5", "Differential sequential DCT", SOF),
+ 0xFFC6: ("SOF6", "Differential progressive DCT", SOF),
+ 0xFFC7: ("SOF7", "Differential spatial", SOF),
+ 0xFFC8: ("JPG", "Extension", None),
+ 0xFFC9: ("SOF9", "Extended sequential DCT (AC)", SOF),
+ 0xFFCA: ("SOF10", "Progressive DCT (AC)", SOF),
+ 0xFFCB: ("SOF11", "Spatial lossless DCT (AC)", SOF),
+ 0xFFCC: ("DAC", "Define arithmetic coding conditioning", Skip),
+ 0xFFCD: ("SOF13", "Differential sequential DCT (AC)", SOF),
+ 0xFFCE: ("SOF14", "Differential progressive DCT (AC)", SOF),
+ 0xFFCF: ("SOF15", "Differential spatial (AC)", SOF),
+ 0xFFD0: ("RST0", "Restart 0", None),
+ 0xFFD1: ("RST1", "Restart 1", None),
+ 0xFFD2: ("RST2", "Restart 2", None),
+ 0xFFD3: ("RST3", "Restart 3", None),
+ 0xFFD4: ("RST4", "Restart 4", None),
+ 0xFFD5: ("RST5", "Restart 5", None),
+ 0xFFD6: ("RST6", "Restart 6", None),
+ 0xFFD7: ("RST7", "Restart 7", None),
+ 0xFFD8: ("SOI", "Start of image", None),
+ 0xFFD9: ("EOI", "End of image", None),
+ 0xFFDA: ("SOS", "Start of scan", Skip),
+ 0xFFDB: ("DQT", "Define quantization table", DQT),
+ 0xFFDC: ("DNL", "Define number of lines", Skip),
+ 0xFFDD: ("DRI", "Define restart interval", Skip),
+ 0xFFDE: ("DHP", "Define hierarchical progression", SOF),
+ 0xFFDF: ("EXP", "Expand reference component", Skip),
+ 0xFFE0: ("APP0", "Application segment 0", APP),
+ 0xFFE1: ("APP1", "Application segment 1", APP),
+ 0xFFE2: ("APP2", "Application segment 2", APP),
+ 0xFFE3: ("APP3", "Application segment 3", APP),
+ 0xFFE4: ("APP4", "Application segment 4", APP),
+ 0xFFE5: ("APP5", "Application segment 5", APP),
+ 0xFFE6: ("APP6", "Application segment 6", APP),
+ 0xFFE7: ("APP7", "Application segment 7", APP),
+ 0xFFE8: ("APP8", "Application segment 8", APP),
+ 0xFFE9: ("APP9", "Application segment 9", APP),
+ 0xFFEA: ("APP10", "Application segment 10", APP),
+ 0xFFEB: ("APP11", "Application segment 11", APP),
+ 0xFFEC: ("APP12", "Application segment 12", APP),
+ 0xFFED: ("APP13", "Application segment 13", APP),
+ 0xFFEE: ("APP14", "Application segment 14", APP),
+ 0xFFEF: ("APP15", "Application segment 15", APP),
+ 0xFFF0: ("JPG0", "Extension 0", None),
+ 0xFFF1: ("JPG1", "Extension 1", None),
+ 0xFFF2: ("JPG2", "Extension 2", None),
+ 0xFFF3: ("JPG3", "Extension 3", None),
+ 0xFFF4: ("JPG4", "Extension 4", None),
+ 0xFFF5: ("JPG5", "Extension 5", None),
+ 0xFFF6: ("JPG6", "Extension 6", None),
+ 0xFFF7: ("JPG7", "Extension 7", None),
+ 0xFFF8: ("JPG8", "Extension 8", None),
+ 0xFFF9: ("JPG9", "Extension 9", None),
+ 0xFFFA: ("JPG10", "Extension 10", None),
+ 0xFFFB: ("JPG11", "Extension 11", None),
+ 0xFFFC: ("JPG12", "Extension 12", None),
+ 0xFFFD: ("JPG13", "Extension 13", None),
+ 0xFFFE: ("COM", "Comment", COM),
+}
+
+
+def _accept(prefix):
+ # Magic number was taken from https://en.wikipedia.org/wiki/JPEG
+ return prefix[:3] == b"\xFF\xD8\xFF"
+
+
+##
+# Image plugin for JPEG and JFIF images.
+
+
+class JpegImageFile(ImageFile.ImageFile):
+ format = "JPEG"
+ format_description = "JPEG (ISO 10918)"
+
+ def _open(self):
+ s = self.fp.read(3)
+
+ if not _accept(s):
+ msg = "not a JPEG file"
+ raise SyntaxError(msg)
+ s = b"\xFF"
+
+ # Create attributes
+ self.bits = self.layers = 0
+
+ # JPEG specifics (internal)
+ self.layer = []
+ self.huffman_dc = {}
+ self.huffman_ac = {}
+ self.quantization = {}
+ self.app = {} # compatibility
+ self.applist = []
+ self.icclist = []
+
+ while True:
+ i = s[0]
+ if i == 0xFF:
+ s = s + self.fp.read(1)
+ i = i16(s)
+ else:
+ # Skip non-0xFF junk
+ s = self.fp.read(1)
+ continue
+
+ if i in MARKER:
+ name, description, handler = MARKER[i]
+ if handler is not None:
+ handler(self, i)
+ if i == 0xFFDA: # start of scan
+ rawmode = self.mode
+ if self.mode == "CMYK":
+ rawmode = "CMYK;I" # assume adobe conventions
+ self.tile = [("jpeg", (0, 0) + self.size, 0, (rawmode, ""))]
+ # self.__offset = self.fp.tell()
+ break
+ s = self.fp.read(1)
+ elif i in {0, 0xFFFF}:
+ # padded marker or junk; move on
+ s = b"\xff"
+ elif i == 0xFF00: # Skip extraneous data (escaped 0xFF)
+ s = self.fp.read(1)
+ else:
+ msg = "no marker found"
+ raise SyntaxError(msg)
+
+ def load_read(self, read_bytes):
+ """
+ internal: read more image data
+        For premature EOF and LOAD_TRUNCATED_IMAGES, adds an EOI marker
+ so libjpeg can finish decoding
+ """
+ s = self.fp.read(read_bytes)
+
+ if not s and ImageFile.LOAD_TRUNCATED_IMAGES and not hasattr(self, "_ended"):
+ # Premature EOF.
+ # Pretend file is finished adding EOI marker
+ self._ended = True
+ return b"\xFF\xD9"
+
+ return s
+
+ def draft(self, mode, size):
+ if len(self.tile) != 1:
+ return
+
+ # Protect from second call
+ if self.decoderconfig:
+ return
+
+ d, e, o, a = self.tile[0]
+ scale = 1
+ original_size = self.size
+
+ if a[0] == "RGB" and mode in ["L", "YCbCr"]:
+ self._mode = mode
+ a = mode, ""
+
+ if size:
+ scale = min(self.size[0] // size[0], self.size[1] // size[1])
+ for s in [8, 4, 2, 1]:
+ if scale >= s:
+ break
+ e = (
+ e[0],
+ e[1],
+ (e[2] - e[0] + s - 1) // s + e[0],
+ (e[3] - e[1] + s - 1) // s + e[1],
+ )
+ self._size = ((self.size[0] + s - 1) // s, (self.size[1] + s - 1) // s)
+ scale = s
+
+ self.tile = [(d, e, o, a)]
+ self.decoderconfig = (scale, 0)
+
+ box = (0, 0, original_size[0] / scale, original_size[1] / scale)
+ return self.mode, box
+
+ def load_djpeg(self):
+ # ALTERNATIVE: handle JPEGs via the IJG command line utilities
+
+ f, path = tempfile.mkstemp()
+ os.close(f)
+ if os.path.exists(self.filename):
+ subprocess.check_call(["djpeg", "-outfile", path, self.filename])
+ else:
+ try:
+ os.unlink(path)
+ except OSError:
+ pass
+
+ msg = "Invalid Filename"
+ raise ValueError(msg)
+
+ try:
+ with Image.open(path) as _im:
+ _im.load()
+ self.im = _im.im
+ finally:
+ try:
+ os.unlink(path)
+ except OSError:
+ pass
+
+ self._mode = self.im.mode
+ self._size = self.im.size
+
+ self.tile = []
+
+ def _getexif(self):
+ return _getexif(self)
+
+ def _getmp(self):
+ return _getmp(self)
+
+ def getxmp(self):
+ """
+ Returns a dictionary containing the XMP tags.
+ Requires defusedxml to be installed.
+
+ :returns: XMP tags in a dictionary.
+ """
+
+ for segment, content in self.applist:
+ if segment == "APP1":
+ marker, xmp_tags = content.split(b"\x00")[:2]
+ if marker == b"http://ns.adobe.com/xap/1.0/":
+ return self._getxmp(xmp_tags)
+ return {}
+
+
+def _getexif(self):
+ if "exif" not in self.info:
+ return None
+ return self.getexif()._get_merged_dict()
+
+
+def _getmp(self):
+ # Extract MP information. This method was inspired by the "highly
+ # experimental" _getexif version that's been in use for years now,
+ # itself based on the ImageFileDirectory class in the TIFF plugin.
+
+ # The MP record essentially consists of a TIFF file embedded in a JPEG
+ # application marker.
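+    # For illustration: the record's first eight bytes mirror a TIFF header,
+    # so b"\x4d\x4d\x00\x2a" (big-endian) or b"\x49\x49\x2a\x00" (little-endian)
+    # selects the byte order used for the rest of the index, as decoded below.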
+ try:
+ data = self.info["mp"]
+ except KeyError:
+ return None
+ file_contents = io.BytesIO(data)
+ head = file_contents.read(8)
+ endianness = ">" if head[:4] == b"\x4d\x4d\x00\x2a" else "<"
+ # process dictionary
+ from . import TiffImagePlugin
+
+ try:
+ info = TiffImagePlugin.ImageFileDirectory_v2(head)
+ file_contents.seek(info.next)
+ info.load(file_contents)
+ mp = dict(info)
+ except Exception as e:
+ msg = "malformed MP Index (unreadable directory)"
+ raise SyntaxError(msg) from e
+ # it's an error not to have a number of images
+ try:
+ quant = mp[0xB001]
+ except KeyError as e:
+ msg = "malformed MP Index (no number of images)"
+ raise SyntaxError(msg) from e
+ # get MP entries
+ mpentries = []
+ try:
+ rawmpentries = mp[0xB002]
+ for entrynum in range(0, quant):
+ unpackedentry = struct.unpack_from(
+ f"{endianness}LLLHH", rawmpentries, entrynum * 16
+ )
+ labels = ("Attribute", "Size", "DataOffset", "EntryNo1", "EntryNo2")
+ mpentry = dict(zip(labels, unpackedentry))
+ mpentryattr = {
+ "DependentParentImageFlag": bool(mpentry["Attribute"] & (1 << 31)),
+ "DependentChildImageFlag": bool(mpentry["Attribute"] & (1 << 30)),
+ "RepresentativeImageFlag": bool(mpentry["Attribute"] & (1 << 29)),
+ "Reserved": (mpentry["Attribute"] & (3 << 27)) >> 27,
+ "ImageDataFormat": (mpentry["Attribute"] & (7 << 24)) >> 24,
+ "MPType": mpentry["Attribute"] & 0x00FFFFFF,
+ }
+ if mpentryattr["ImageDataFormat"] == 0:
+ mpentryattr["ImageDataFormat"] = "JPEG"
+ else:
+ msg = "unsupported picture format in MPO"
+ raise SyntaxError(msg)
+ mptypemap = {
+ 0x000000: "Undefined",
+ 0x010001: "Large Thumbnail (VGA Equivalent)",
+ 0x010002: "Large Thumbnail (Full HD Equivalent)",
+ 0x020001: "Multi-Frame Image (Panorama)",
+ 0x020002: "Multi-Frame Image: (Disparity)",
+ 0x020003: "Multi-Frame Image: (Multi-Angle)",
+ 0x030000: "Baseline MP Primary Image",
+ }
+ mpentryattr["MPType"] = mptypemap.get(mpentryattr["MPType"], "Unknown")
+ mpentry["Attribute"] = mpentryattr
+ mpentries.append(mpentry)
+ mp[0xB002] = mpentries
+ except KeyError as e:
+ msg = "malformed MP Index (bad MP Entry)"
+ raise SyntaxError(msg) from e
+ # Next we should try and parse the individual image unique ID list;
+ # we don't because I've never seen this actually used in a real MPO
+ # file and so can't test it.
+ return mp
+
+
+# --------------------------------------------------------------------
+# stuff to save JPEG files
+
+RAWMODE = {
+ "1": "L",
+ "L": "L",
+ "RGB": "RGB",
+ "RGBX": "RGB",
+ "CMYK": "CMYK;I", # assume adobe conventions
+ "YCbCr": "YCbCr",
+}
+
+# fmt: off
+zigzag_index = (
+ 0, 1, 5, 6, 14, 15, 27, 28,
+ 2, 4, 7, 13, 16, 26, 29, 42,
+ 3, 8, 12, 17, 25, 30, 41, 43,
+ 9, 11, 18, 24, 31, 40, 44, 53,
+ 10, 19, 23, 32, 39, 45, 52, 54,
+ 20, 22, 33, 38, 46, 51, 55, 60,
+ 21, 34, 37, 47, 50, 56, 59, 61,
+ 35, 36, 48, 49, 57, 58, 62, 63,
+)
+
+samplings = {
+ (1, 1, 1, 1, 1, 1): 0,
+ (2, 1, 1, 1, 1, 1): 1,
+ (2, 2, 1, 1, 1, 1): 2,
+}
+# fmt: on
+
+
+def get_sampling(im):
+ # There's no subsampling when images have only 1 layer
+ # (grayscale images) or when they are CMYK (4 layers),
+ # so set subsampling to the default value.
+ #
+ # NOTE: currently Pillow can't encode JPEG to YCCK format.
+ # If YCCK support is added in the future, subsampling code will have
+ # to be updated (here and in JpegEncode.c) to deal with 4 layers.
+ if not hasattr(im, "layers") or im.layers in (1, 4):
+ return -1
+ sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3]
+ return samplings.get(sampling, -1)
+
+
+def _save(im, fp, filename):
+ if im.width == 0 or im.height == 0:
+ msg = "cannot write empty image as JPEG"
+ raise ValueError(msg)
+
+ try:
+ rawmode = RAWMODE[im.mode]
+ except KeyError as e:
+ msg = f"cannot write mode {im.mode} as JPEG"
+ raise OSError(msg) from e
+
+ info = im.encoderinfo
+
+ dpi = [round(x) for x in info.get("dpi", (0, 0))]
+
+ quality = info.get("quality", -1)
+ subsampling = info.get("subsampling", -1)
+ qtables = info.get("qtables")
+
+ if quality == "keep":
+ quality = -1
+ subsampling = "keep"
+ qtables = "keep"
+ elif quality in presets:
+ preset = presets[quality]
+ quality = -1
+ subsampling = preset.get("subsampling", -1)
+ qtables = preset.get("quantization")
+ elif not isinstance(quality, int):
+ msg = "Invalid quality setting"
+ raise ValueError(msg)
+ else:
+ if subsampling in presets:
+ subsampling = presets[subsampling].get("subsampling", -1)
+ if isinstance(qtables, str) and qtables in presets:
+ qtables = presets[qtables].get("quantization")
+
+ if subsampling == "4:4:4":
+ subsampling = 0
+ elif subsampling == "4:2:2":
+ subsampling = 1
+ elif subsampling == "4:2:0":
+ subsampling = 2
+ elif subsampling == "4:1:1":
+ # For compatibility. Before Pillow 4.3, 4:1:1 actually meant 4:2:0.
+ # Set 4:2:0 if someone is still using that value.
+ subsampling = 2
+ elif subsampling == "keep":
+ if im.format != "JPEG":
+ msg = "Cannot use 'keep' when original image is not a JPEG"
+ raise ValueError(msg)
+ subsampling = get_sampling(im)
+
+ def validate_qtables(qtables):
+ if qtables is None:
+ return qtables
+ if isinstance(qtables, str):
+ try:
+ lines = [
+ int(num)
+ for line in qtables.splitlines()
+ for num in line.split("#", 1)[0].split()
+ ]
+ except ValueError as e:
+ msg = "Invalid quantization table"
+ raise ValueError(msg) from e
+ else:
+ qtables = [lines[s : s + 64] for s in range(0, len(lines), 64)]
+ if isinstance(qtables, (tuple, list, dict)):
+ if isinstance(qtables, dict):
+ qtables = [
+ qtables[key] for key in range(len(qtables)) if key in qtables
+ ]
+ elif isinstance(qtables, tuple):
+ qtables = list(qtables)
+ if not (0 < len(qtables) < 5):
+ msg = "None or too many quantization tables"
+ raise ValueError(msg)
+ for idx, table in enumerate(qtables):
+ try:
+ if len(table) != 64:
+ msg = "Invalid quantization table"
+ raise TypeError(msg)
+ table = array.array("H", table)
+ except TypeError as e:
+ msg = "Invalid quantization table"
+ raise ValueError(msg) from e
+ else:
+ qtables[idx] = list(table)
+ return qtables
+
+ if qtables == "keep":
+ if im.format != "JPEG":
+ msg = "Cannot use 'keep' when original image is not a JPEG"
+ raise ValueError(msg)
+ qtables = getattr(im, "quantization", None)
+ qtables = validate_qtables(qtables)
+
+ extra = info.get("extra", b"")
+
+ MAX_BYTES_IN_MARKER = 65533
+ icc_profile = info.get("icc_profile")
+ if icc_profile:
+ ICC_OVERHEAD_LEN = 14
+ MAX_DATA_BYTES_IN_MARKER = MAX_BYTES_IN_MARKER - ICC_OVERHEAD_LEN
+ markers = []
+ while icc_profile:
+ markers.append(icc_profile[:MAX_DATA_BYTES_IN_MARKER])
+ icc_profile = icc_profile[MAX_DATA_BYTES_IN_MARKER:]
+ i = 1
+ for marker in markers:
+ size = o16(2 + ICC_OVERHEAD_LEN + len(marker))
+ extra += (
+ b"\xFF\xE2"
+ + size
+ + b"ICC_PROFILE\0"
+ + o8(i)
+ + o8(len(markers))
+ + marker
+ )
+ i += 1
+
+ comment = info.get("comment", im.info.get("comment"))
+
+ # "progressive" is the official name, but older documentation
+ # says "progression"
+ # FIXME: issue a warning if the wrong form is used (post-1.1.7)
+ progressive = info.get("progressive", False) or info.get("progression", False)
+
+ optimize = info.get("optimize", False)
+
+ exif = info.get("exif", b"")
+ if isinstance(exif, Image.Exif):
+ exif = exif.tobytes()
+ if len(exif) > MAX_BYTES_IN_MARKER:
+ msg = "EXIF data is too long"
+ raise ValueError(msg)
+
+ # get keyword arguments
+ im.encoderconfig = (
+ quality,
+ progressive,
+ info.get("smooth", 0),
+ optimize,
+ info.get("keep_rgb", False),
+ info.get("streamtype", 0),
+ dpi[0],
+ dpi[1],
+ subsampling,
+ info.get("restart_marker_blocks", 0),
+ info.get("restart_marker_rows", 0),
+ qtables,
+ comment,
+ extra,
+ exif,
+ )
+
+    # if we optimize, libjpeg needs a buffer big enough to hold the whole image
+    # in a shot. We guess at im.size bytes (raw pixel size is channels * size);
+    # this is a value that's been used in a django patch.
+    # https://github.com/matthewwithanm/django-imagekit/issues/50
+ bufsize = 0
+ if optimize or progressive:
+ # CMYK can be bigger
+ if im.mode == "CMYK":
+ bufsize = 4 * im.size[0] * im.size[1]
+ # keep sets quality to -1, but the actual value may be high.
+ elif quality >= 95 or quality == -1:
+ bufsize = 2 * im.size[0] * im.size[1]
+ else:
+ bufsize = im.size[0] * im.size[1]
+ if exif:
+ bufsize += len(exif) + 5
+ if extra:
+ bufsize += len(extra) + 1
+ else:
+ # The EXIF info needs to be written as one block, + APP1, + one spare byte.
+ # Ensure that our buffer is big enough. Same with the icc_profile block.
+ bufsize = max(bufsize, len(exif) + 5, len(extra) + 1)
+
+ ImageFile._save(im, fp, [("jpeg", (0, 0) + im.size, 0, rawmode)], bufsize)
+
+
+def _save_cjpeg(im, fp, filename):
+ # ALTERNATIVE: handle JPEGs via the IJG command line utilities.
+ tempfile = im._dump()
+ subprocess.check_call(["cjpeg", "-outfile", filename, tempfile])
+ try:
+ os.unlink(tempfile)
+ except OSError:
+ pass
+
+
+##
+# Factory for making JPEG and MPO instances
+def jpeg_factory(fp=None, filename=None):
+ im = JpegImageFile(fp, filename)
+ try:
+ mpheader = im._getmp()
+ if mpheader[45057] > 1:
+ # It's actually an MPO
+ from .MpoImagePlugin import MpoImageFile
+
+ # Don't reload everything, just convert it.
+ im = MpoImageFile.adopt(im, mpheader)
+ except (TypeError, IndexError):
+ # It is really a JPEG
+ pass
+ except SyntaxError:
+ warnings.warn(
+ "Image appears to be a malformed MPO file, it will be "
+ "interpreted as a base JPEG file"
+ )
+ return im
+
+
+# ---------------------------------------------------------------------
+# Registry stuff
+
+Image.register_open(JpegImageFile.format, jpeg_factory, _accept)
+Image.register_save(JpegImageFile.format, _save)
+
+Image.register_extensions(JpegImageFile.format, [".jfif", ".jpe", ".jpg", ".jpeg"])
+
+Image.register_mime(JpegImageFile.format, "image/jpeg")
diff --git a/Lib/site-packages/PIL/JpegPresets.py b/Lib/site-packages/PIL/JpegPresets.py
new file mode 100644
index 0000000..9ecfdb2
--- /dev/null
+++ b/Lib/site-packages/PIL/JpegPresets.py
@@ -0,0 +1,241 @@
+"""
+JPEG quality settings equivalent to the Photoshop settings.
+Can be used when saving JPEG files.
+
+The following presets are available by default:
+``web_low``, ``web_medium``, ``web_high``, ``web_very_high``, ``web_maximum``,
+``low``, ``medium``, ``high``, ``maximum``.
+More presets can be added to the :py:data:`presets` dict if needed.
+
+To apply the preset, specify::
+
+ quality="preset_name"
+
+To apply only the quantization table::
+
+ qtables="preset_name"
+
+To apply only the subsampling setting::
+
+ subsampling="preset_name"
+
+Example::
+
+ im.save("image_name.jpg", quality="web_high")
+
+Subsampling
+-----------
+
+Subsampling is the practice of encoding images by implementing less resolution
+for chroma information than for luma information.
+(ref.: https://en.wikipedia.org/wiki/Chroma_subsampling)
+
+Possible subsampling values are 0, 1 and 2 that correspond to 4:4:4, 4:2:2 and
+4:2:0.
+
+You can get the subsampling of a JPEG with the
+:func:`.JpegImagePlugin.get_sampling` function.
+
+In JPEG compressed data a JPEG marker is used instead of an EXIF tag.
+(ref.: https://exiv2.org/tags.html)
+
+
+Quantization tables
+-------------------
+
+They are the values used by the DCT (Discrete cosine transform) to remove
+*unnecessary* information from the image (the lossy part of the compression).
+(ref.: https://en.wikipedia.org/wiki/Quantization_matrix#Quantization_matrices,
+https://en.wikipedia.org/wiki/JPEG#Quantization)
+
+You can get the quantization tables of a JPEG with::
+
+ im.quantization
+
+This will return a dict with a number of lists. You can pass this dict
+directly as the qtables argument when saving a JPEG.
+
+The quantization table format in presets is a list with sublists. These formats
+are interchangeable.
+
+Libjpeg ref.:
+https://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html
+
+"""
+from __future__ import annotations
+
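+# A hedged usage sketch of the presets below; the file names are illustrative
+# and not part of this module:
+#
+#   from PIL import Image
+#
+#   with Image.open("photo.jpg") as im:
+#       tables = im.quantization  # dict of 64-value lists, reusable as qtables
+#       im.save("out.jpg", qtables="web_high", subsampling="web_high")
+#       im.save("out2.jpg", quality="web_medium")  # preset sets both at once
+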
+# fmt: off
+presets = {
+ 'web_low': {'subsampling': 2, # "4:2:0"
+ 'quantization': [
+ [20, 16, 25, 39, 50, 46, 62, 68,
+ 16, 18, 23, 38, 38, 53, 65, 68,
+ 25, 23, 31, 38, 53, 65, 68, 68,
+ 39, 38, 38, 53, 65, 68, 68, 68,
+ 50, 38, 53, 65, 68, 68, 68, 68,
+ 46, 53, 65, 68, 68, 68, 68, 68,
+ 62, 65, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68],
+ [21, 25, 32, 38, 54, 68, 68, 68,
+ 25, 28, 24, 38, 54, 68, 68, 68,
+ 32, 24, 32, 43, 66, 68, 68, 68,
+ 38, 38, 43, 53, 68, 68, 68, 68,
+ 54, 54, 66, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68]
+ ]},
+ 'web_medium': {'subsampling': 2, # "4:2:0"
+ 'quantization': [
+ [16, 11, 11, 16, 23, 27, 31, 30,
+ 11, 12, 12, 15, 20, 23, 23, 30,
+ 11, 12, 13, 16, 23, 26, 35, 47,
+ 16, 15, 16, 23, 26, 37, 47, 64,
+ 23, 20, 23, 26, 39, 51, 64, 64,
+ 27, 23, 26, 37, 51, 64, 64, 64,
+ 31, 23, 35, 47, 64, 64, 64, 64,
+ 30, 30, 47, 64, 64, 64, 64, 64],
+ [17, 15, 17, 21, 20, 26, 38, 48,
+ 15, 19, 18, 17, 20, 26, 35, 43,
+ 17, 18, 20, 22, 26, 30, 46, 53,
+ 21, 17, 22, 28, 30, 39, 53, 64,
+ 20, 20, 26, 30, 39, 48, 64, 64,
+ 26, 26, 30, 39, 48, 63, 64, 64,
+ 38, 35, 46, 53, 64, 64, 64, 64,
+ 48, 43, 53, 64, 64, 64, 64, 64]
+ ]},
+ 'web_high': {'subsampling': 0, # "4:4:4"
+ 'quantization': [
+ [6, 4, 4, 6, 9, 11, 12, 16,
+ 4, 5, 5, 6, 8, 10, 12, 12,
+ 4, 5, 5, 6, 10, 12, 14, 19,
+ 6, 6, 6, 11, 12, 15, 19, 28,
+ 9, 8, 10, 12, 16, 20, 27, 31,
+ 11, 10, 12, 15, 20, 27, 31, 31,
+ 12, 12, 14, 19, 27, 31, 31, 31,
+ 16, 12, 19, 28, 31, 31, 31, 31],
+ [7, 7, 13, 24, 26, 31, 31, 31,
+ 7, 12, 16, 21, 31, 31, 31, 31,
+ 13, 16, 17, 31, 31, 31, 31, 31,
+ 24, 21, 31, 31, 31, 31, 31, 31,
+ 26, 31, 31, 31, 31, 31, 31, 31,
+ 31, 31, 31, 31, 31, 31, 31, 31,
+ 31, 31, 31, 31, 31, 31, 31, 31,
+ 31, 31, 31, 31, 31, 31, 31, 31]
+ ]},
+ 'web_very_high': {'subsampling': 0, # "4:4:4"
+ 'quantization': [
+ [2, 2, 2, 2, 3, 4, 5, 6,
+ 2, 2, 2, 2, 3, 4, 5, 6,
+ 2, 2, 2, 2, 4, 5, 7, 9,
+ 2, 2, 2, 4, 5, 7, 9, 12,
+ 3, 3, 4, 5, 8, 10, 12, 12,
+ 4, 4, 5, 7, 10, 12, 12, 12,
+ 5, 5, 7, 9, 12, 12, 12, 12,
+ 6, 6, 9, 12, 12, 12, 12, 12],
+ [3, 3, 5, 9, 13, 15, 15, 15,
+ 3, 4, 6, 11, 14, 12, 12, 12,
+ 5, 6, 9, 14, 12, 12, 12, 12,
+ 9, 11, 14, 12, 12, 12, 12, 12,
+ 13, 14, 12, 12, 12, 12, 12, 12,
+ 15, 12, 12, 12, 12, 12, 12, 12,
+ 15, 12, 12, 12, 12, 12, 12, 12,
+ 15, 12, 12, 12, 12, 12, 12, 12]
+ ]},
+ 'web_maximum': {'subsampling': 0, # "4:4:4"
+ 'quantization': [
+ [1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 2,
+ 1, 1, 1, 1, 1, 1, 2, 2,
+ 1, 1, 1, 1, 1, 2, 2, 3,
+ 1, 1, 1, 1, 2, 2, 3, 3,
+ 1, 1, 1, 2, 2, 3, 3, 3,
+ 1, 1, 2, 2, 3, 3, 3, 3],
+ [1, 1, 1, 2, 2, 3, 3, 3,
+ 1, 1, 1, 2, 3, 3, 3, 3,
+ 1, 1, 1, 3, 3, 3, 3, 3,
+ 2, 2, 3, 3, 3, 3, 3, 3,
+ 2, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3]
+ ]},
+ 'low': {'subsampling': 2, # "4:2:0"
+ 'quantization': [
+ [18, 14, 14, 21, 30, 35, 34, 17,
+ 14, 16, 16, 19, 26, 23, 12, 12,
+ 14, 16, 17, 21, 23, 12, 12, 12,
+ 21, 19, 21, 23, 12, 12, 12, 12,
+ 30, 26, 23, 12, 12, 12, 12, 12,
+ 35, 23, 12, 12, 12, 12, 12, 12,
+ 34, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12],
+ [20, 19, 22, 27, 20, 20, 17, 17,
+ 19, 25, 23, 14, 14, 12, 12, 12,
+ 22, 23, 14, 14, 12, 12, 12, 12,
+ 27, 14, 14, 12, 12, 12, 12, 12,
+ 20, 14, 12, 12, 12, 12, 12, 12,
+ 20, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12]
+ ]},
+ 'medium': {'subsampling': 2, # "4:2:0"
+ 'quantization': [
+ [12, 8, 8, 12, 17, 21, 24, 17,
+ 8, 9, 9, 11, 15, 19, 12, 12,
+ 8, 9, 10, 12, 19, 12, 12, 12,
+ 12, 11, 12, 21, 12, 12, 12, 12,
+ 17, 15, 19, 12, 12, 12, 12, 12,
+ 21, 19, 12, 12, 12, 12, 12, 12,
+ 24, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12],
+ [13, 11, 13, 16, 20, 20, 17, 17,
+ 11, 14, 14, 14, 14, 12, 12, 12,
+ 13, 14, 14, 14, 12, 12, 12, 12,
+ 16, 14, 14, 12, 12, 12, 12, 12,
+ 20, 14, 12, 12, 12, 12, 12, 12,
+ 20, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12]
+ ]},
+ 'high': {'subsampling': 0, # "4:4:4"
+ 'quantization': [
+ [6, 4, 4, 6, 9, 11, 12, 16,
+ 4, 5, 5, 6, 8, 10, 12, 12,
+ 4, 5, 5, 6, 10, 12, 12, 12,
+ 6, 6, 6, 11, 12, 12, 12, 12,
+ 9, 8, 10, 12, 12, 12, 12, 12,
+ 11, 10, 12, 12, 12, 12, 12, 12,
+ 12, 12, 12, 12, 12, 12, 12, 12,
+ 16, 12, 12, 12, 12, 12, 12, 12],
+ [7, 7, 13, 24, 20, 20, 17, 17,
+ 7, 12, 16, 14, 14, 12, 12, 12,
+ 13, 16, 14, 14, 12, 12, 12, 12,
+ 24, 14, 14, 12, 12, 12, 12, 12,
+ 20, 14, 12, 12, 12, 12, 12, 12,
+ 20, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12]
+ ]},
+ 'maximum': {'subsampling': 0, # "4:4:4"
+ 'quantization': [
+ [2, 2, 2, 2, 3, 4, 5, 6,
+ 2, 2, 2, 2, 3, 4, 5, 6,
+ 2, 2, 2, 2, 4, 5, 7, 9,
+ 2, 2, 2, 4, 5, 7, 9, 12,
+ 3, 3, 4, 5, 8, 10, 12, 12,
+ 4, 4, 5, 7, 10, 12, 12, 12,
+ 5, 5, 7, 9, 12, 12, 12, 12,
+ 6, 6, 9, 12, 12, 12, 12, 12],
+ [3, 3, 5, 9, 13, 15, 15, 15,
+ 3, 4, 6, 10, 14, 12, 12, 12,
+ 5, 6, 9, 14, 12, 12, 12, 12,
+ 9, 10, 14, 12, 12, 12, 12, 12,
+ 13, 14, 12, 12, 12, 12, 12, 12,
+ 15, 12, 12, 12, 12, 12, 12, 12,
+ 15, 12, 12, 12, 12, 12, 12, 12,
+ 15, 12, 12, 12, 12, 12, 12, 12]
+ ]},
+}
+# fmt: on
diff --git a/Lib/site-packages/PIL/McIdasImagePlugin.py b/Lib/site-packages/PIL/McIdasImagePlugin.py
new file mode 100644
index 0000000..9a85c0d
--- /dev/null
+++ b/Lib/site-packages/PIL/McIdasImagePlugin.py
@@ -0,0 +1,76 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# Basic McIdas support for PIL
+#
+# History:
+# 1997-05-05 fl Created (8-bit images only)
+# 2009-03-08 fl Added 16/32-bit support.
+#
+# Thanks to Richard Jones and Craig Swank for specs and samples.
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import struct
+
+from . import Image, ImageFile
+
+
+def _accept(s):
+ return s[:8] == b"\x00\x00\x00\x00\x00\x00\x00\x04"
+
+
+##
+# Image plugin for McIdas area images.
+
+
+class McIdasImageFile(ImageFile.ImageFile):
+ format = "MCIDAS"
+ format_description = "McIdas area file"
+
+ def _open(self):
+ # parse area file directory
+ s = self.fp.read(256)
+ if not _accept(s) or len(s) != 256:
+ msg = "not an McIdas area file"
+ raise SyntaxError(msg)
+
+ self.area_descriptor_raw = s
+ self.area_descriptor = w = [0] + list(struct.unpack("!64i", s))
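+        # Directory words used below (1-based thanks to the [0] padding; the
+        # meanings are a best-effort gloss of the AREA format): w[9] lines,
+        # w[10] elements per line, w[11] bytes per element, w[14] bands,
+        # w[15] line-prefix length, w[34] offset to the start of pixel data.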
+
+ # get mode
+ if w[11] == 1:
+ mode = rawmode = "L"
+ elif w[11] == 2:
+ # FIXME: add memory map support
+ mode = "I"
+ rawmode = "I;16B"
+ elif w[11] == 4:
+ # FIXME: add memory map support
+ mode = "I"
+ rawmode = "I;32B"
+ else:
+ msg = "unsupported McIdas format"
+ raise SyntaxError(msg)
+
+ self._mode = mode
+ self._size = w[10], w[9]
+
+ offset = w[34] + w[15]
+ stride = w[15] + w[10] * w[11] * w[14]
+
+ self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))]
+
+
+# --------------------------------------------------------------------
+# registry
+
+Image.register_open(McIdasImageFile.format, McIdasImageFile, _accept)
+
+# no default extension
diff --git a/Lib/site-packages/PIL/MicImagePlugin.py b/Lib/site-packages/PIL/MicImagePlugin.py
new file mode 100644
index 0000000..f4529d9
--- /dev/null
+++ b/Lib/site-packages/PIL/MicImagePlugin.py
@@ -0,0 +1,107 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# Microsoft Image Composer support for PIL
+#
+# Notes:
+# uses TiffImagePlugin.py to read the actual image streams
+#
+# History:
+# 97-01-20 fl Created
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import olefile
+
+from . import Image, TiffImagePlugin
+
+#
+# --------------------------------------------------------------------
+
+
+def _accept(prefix):
+ return prefix[:8] == olefile.MAGIC
+
+
+##
+# Image plugin for Microsoft's Image Composer file format.
+
+
+class MicImageFile(TiffImagePlugin.TiffImageFile):
+ format = "MIC"
+ format_description = "Microsoft Image Composer"
+ _close_exclusive_fp_after_loading = False
+
+ def _open(self):
+        # read the OLE directory and see if this is likely to be
+        # a Microsoft Image Composer file
+
+ try:
+ self.ole = olefile.OleFileIO(self.fp)
+ except OSError as e:
+ msg = "not an MIC file; invalid OLE file"
+ raise SyntaxError(msg) from e
+
+ # find ACI subfiles with Image members (maybe not the
+ # best way to identify MIC files, but what the... ;-)
+
+ self.images = [
+ path
+ for path in self.ole.listdir()
+ if path[1:] and path[0][-4:] == ".ACI" and path[1] == "Image"
+ ]
+
+ # if we didn't find any images, this is probably not
+ # an MIC file.
+ if not self.images:
+ msg = "not an MIC file; no image entries"
+ raise SyntaxError(msg)
+
+ self.frame = None
+ self._n_frames = len(self.images)
+ self.is_animated = self._n_frames > 1
+
+ self.__fp = self.fp
+ self.seek(0)
+
+ def seek(self, frame):
+ if not self._seek_check(frame):
+ return
+ try:
+ filename = self.images[frame]
+ except IndexError as e:
+ msg = "no such frame"
+ raise EOFError(msg) from e
+
+ self.fp = self.ole.openstream(filename)
+
+ TiffImagePlugin.TiffImageFile._open(self)
+
+ self.frame = frame
+
+ def tell(self):
+ return self.frame
+
+ def close(self):
+ self.__fp.close()
+ self.ole.close()
+ super().close()
+
+ def __exit__(self, *args):
+ self.__fp.close()
+ self.ole.close()
+ super().__exit__()
+
+
+#
+# --------------------------------------------------------------------
+
+Image.register_open(MicImageFile.format, MicImageFile, _accept)
+
+Image.register_extension(MicImageFile.format, ".mic")
diff --git a/Lib/site-packages/PIL/MpegImagePlugin.py b/Lib/site-packages/PIL/MpegImagePlugin.py
new file mode 100644
index 0000000..f4e598c
--- /dev/null
+++ b/Lib/site-packages/PIL/MpegImagePlugin.py
@@ -0,0 +1,82 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# MPEG file handling
+#
+# History:
+# 95-09-09 fl Created
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1995.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from . import Image, ImageFile
+from ._binary import i8
+
+#
+# Bitstream parser
+
+
+class BitStream:
+ def __init__(self, fp):
+ self.fp = fp
+ self.bits = 0
+ self.bitbuffer = 0
+
+ def next(self):
+ return i8(self.fp.read(1))
+
+ def peek(self, bits):
+ while self.bits < bits:
+ c = self.next()
+ if c < 0:
+ self.bits = 0
+ continue
+ self.bitbuffer = (self.bitbuffer << 8) + c
+ self.bits += 8
+ return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1
+
+ def skip(self, bits):
+ while self.bits < bits:
+ self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1))
+ self.bits += 8
+ self.bits = self.bits - bits
+
+ def read(self, bits):
+ v = self.peek(bits)
+ self.bits = self.bits - bits
+ return v
+
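+# A hedged sketch of how MpegImageFile below consumes an MPEG-1 sequence
+# header through this bit stream; the byte values are illustrative only:
+#
+#   import io
+#   bs = BitStream(io.BytesIO(b"\x00\x00\x01\xb3\x16\x00\xf0\x15"))
+#   bs.read(32)  # 0x000001B3, the sequence-header start code
+#   bs.read(12)  # horizontal size in pixels (here 352)
+#   bs.read(12)  # vertical size in pixels (here 240)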
+
+##
+# Image plugin for MPEG streams. This plugin can identify a stream,
+# but it cannot read it.
+
+
+class MpegImageFile(ImageFile.ImageFile):
+ format = "MPEG"
+ format_description = "MPEG"
+
+ def _open(self):
+ s = BitStream(self.fp)
+
+ if s.read(32) != 0x1B3:
+ msg = "not an MPEG file"
+ raise SyntaxError(msg)
+
+ self._mode = "RGB"
+ self._size = s.read(12), s.read(12)
+
+
+# --------------------------------------------------------------------
+# Registry stuff
+
+Image.register_open(MpegImageFile.format, MpegImageFile)
+
+Image.register_extensions(MpegImageFile.format, [".mpg", ".mpeg"])
+
+Image.register_mime(MpegImageFile.format, "video/mpeg")
diff --git a/Lib/site-packages/PIL/MpoImagePlugin.py b/Lib/site-packages/PIL/MpoImagePlugin.py
new file mode 100644
index 0000000..199a100
--- /dev/null
+++ b/Lib/site-packages/PIL/MpoImagePlugin.py
@@ -0,0 +1,195 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# MPO file handling
+#
+# See "Multi-Picture Format" (CIPA DC-007-Translation 2009, Standard of the
+# Camera & Imaging Products Association)
+#
+# The multi-picture object combines multiple JPEG images (with a modified EXIF
+# data format) into a single file. While it can theoretically be used much like
+# a GIF animation, it is commonly used to represent 3D photographs and is (as
+# of this writing) the most commonly used format by 3D cameras.
+#
+# History:
+# 2014-03-13 Feneric Created
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import itertools
+import os
+import struct
+
+from . import (
+ ExifTags,
+ Image,
+ ImageFile,
+ ImageSequence,
+ JpegImagePlugin,
+ TiffImagePlugin,
+)
+from ._binary import i16be as i16
+from ._binary import o32le
+
+
+def _save(im, fp, filename):
+ JpegImagePlugin._save(im, fp, filename)
+
+
+def _save_all(im, fp, filename):
+ append_images = im.encoderinfo.get("append_images", [])
+ if not append_images:
+ try:
+ animated = im.is_animated
+ except AttributeError:
+ animated = False
+ if not animated:
+ _save(im, fp, filename)
+ return
+
+ mpf_offset = 28
+ offsets = []
+ for imSequence in itertools.chain([im], append_images):
+ for im_frame in ImageSequence.Iterator(imSequence):
+ if not offsets:
+ # APP2 marker
+ im_frame.encoderinfo["extra"] = (
+ b"\xFF\xE2" + struct.pack(">H", 6 + 82) + b"MPF\0" + b" " * 82
+ )
+ exif = im_frame.encoderinfo.get("exif")
+ if isinstance(exif, Image.Exif):
+ exif = exif.tobytes()
+ im_frame.encoderinfo["exif"] = exif
+ if exif:
+ mpf_offset += 4 + len(exif)
+
+ JpegImagePlugin._save(im_frame, fp, filename)
+ offsets.append(fp.tell())
+ else:
+ im_frame.save(fp, "JPEG")
+ offsets.append(fp.tell() - offsets[-1])
+
+ ifd = TiffImagePlugin.ImageFileDirectory_v2()
+ ifd[0xB000] = b"0100"
+ ifd[0xB001] = len(offsets)
+
+ mpentries = b""
+ data_offset = 0
+ for i, size in enumerate(offsets):
+ if i == 0:
+ mptype = 0x030000 # Baseline MP Primary Image
+ else:
+ mptype = 0x000000 # Undefined
+        mpentries += struct.pack("<LLLHH", mptype, size, data_offset, 0, 0)
+        if i == 0:
+            data_offset -= mpf_offset
+        data_offset += size
+    ifd[0xB002] = mpentries
+
+    fp.seek(mpf_offset)
+    fp.write(b"II\x2A\x00" + o32le(8) + ifd.tobytes(8))
+    fp.seek(0, os.SEEK_END)
+
+
+class MpoImageFile(JpegImagePlugin.JpegImageFile):
+    format = "MPO"
+    format_description = "MPO (CIPA DC-007)"
+    _close_exclusive_fp_after_loading = False
+
+    def _open(self):
+        self.fp.seek(0)  # prep the fp in order to pass the JPEG test
+        JpegImagePlugin.JpegImageFile._open(self)
+        self._after_jpeg_open()
+
+    def _after_jpeg_open(self, mpheader=None):
+        self._initial_size = self.size
+        self.mpinfo = mpheader if mpheader is not None else self._getmp()
+        self.n_frames = self.mpinfo[0xB001]
+        self.__mpoffsets = [
+            mpent["DataOffset"] + self.info["mpoffset"] for mpent in self.mpinfo[0xB002]
+        ]
+        self.__mpoffsets[0] = 0
+        # Note that the following assertion will only be invalid if something
+        # gets broken within JpegImagePlugin.
+        assert self.n_frames == len(self.__mpoffsets)
+        del self.info["mpoffset"]  # no longer needed
+        self.is_animated = self.n_frames > 1
+ self._fp = self.fp # FIXME: hack
+ self._fp.seek(self.__mpoffsets[0]) # get ready to read first frame
+ self.__frame = 0
+ self.offset = 0
+ # for now we can only handle reading and individual frame extraction
+ self.readonly = 1
+
+ def load_seek(self, pos):
+ self._fp.seek(pos)
+
+ def seek(self, frame):
+ if not self._seek_check(frame):
+ return
+ self.fp = self._fp
+ self.offset = self.__mpoffsets[frame]
+
+ self.fp.seek(self.offset + 2) # skip SOI marker
+ segment = self.fp.read(2)
+ if not segment:
+ msg = "No data found for frame"
+ raise ValueError(msg)
+ self._size = self._initial_size
+ if i16(segment) == 0xFFE1: # APP1
+ n = i16(self.fp.read(2)) - 2
+ self.info["exif"] = ImageFile._safe_read(self.fp, n)
+ self._reload_exif()
+
+ mptype = self.mpinfo[0xB002][frame]["Attribute"]["MPType"]
+ if mptype.startswith("Large Thumbnail"):
+ exif = self.getexif().get_ifd(ExifTags.IFD.Exif)
+ if 40962 in exif and 40963 in exif:
+ self._size = (exif[40962], exif[40963])
+ elif "exif" in self.info:
+ del self.info["exif"]
+ self._reload_exif()
+
+ self.tile = [("jpeg", (0, 0) + self.size, self.offset, (self.mode, ""))]
+ self.__frame = frame
+
+ def tell(self):
+ return self.__frame
+
+ @staticmethod
+ def adopt(jpeg_instance, mpheader=None):
+ """
+ Transform the instance of JpegImageFile into
+ an instance of MpoImageFile.
+ After the call, the JpegImageFile is extended
+ to be an MpoImageFile.
+
+ This is essentially useful when opening a JPEG
+ file that reveals itself as an MPO, to avoid
+        a double call to _open.
+ """
+ jpeg_instance.__class__ = MpoImageFile
+ jpeg_instance._after_jpeg_open(mpheader)
+ return jpeg_instance
+
+
+# ---------------------------------------------------------------------
+# Registry stuff
+
+# Note that since MPO shares a factory with JPEG, we do not need to do a
+# separate registration for it here.
+# Image.register_open(MpoImageFile.format,
+# JpegImagePlugin.jpeg_factory, _accept)
+Image.register_save(MpoImageFile.format, _save)
+Image.register_save_all(MpoImageFile.format, _save_all)
+
+Image.register_extension(MpoImageFile.format, ".mpo")
+
+Image.register_mime(MpoImageFile.format, "image/mpo")
diff --git a/Lib/site-packages/PIL/MspImagePlugin.py b/Lib/site-packages/PIL/MspImagePlugin.py
new file mode 100644
index 0000000..77dac65
--- /dev/null
+++ b/Lib/site-packages/PIL/MspImagePlugin.py
@@ -0,0 +1,195 @@
+#
+# The Python Imaging Library.
+#
+# MSP file handling
+#
+# This is the format used by the Paint program in Windows 1 and 2.
+#
+# History:
+# 95-09-05 fl Created
+# 97-01-03 fl Read/write MSP images
+# 17-02-21 es Fixed RLE interpretation
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1995-97.
+# Copyright (c) Eric Soroos 2017.
+#
+# See the README file for information on usage and redistribution.
+#
+# More info on this format: https://archive.org/details/gg243631
+# Page 313:
+# Figure 205. Windows Paint Version 1: "DanM" Format
+# Figure 206. Windows Paint Version 2: "LinS" Format. Used in Windows V2.03
+#
+# See also: https://www.fileformat.info/format/mspaint/egff.htm
+from __future__ import annotations
+
+import io
+import struct
+
+from . import Image, ImageFile
+from ._binary import i16le as i16
+from ._binary import o16le as o16
+
+#
+# read MSP files
+
+
+def _accept(prefix):
+ return prefix[:4] in [b"DanM", b"LinS"]
+
+
+##
+# Image plugin for Windows MSP images. This plugin supports both
+# uncompressed (Windows 1.0) and compressed (Windows 2.0) images.
+
+
+class MspImageFile(ImageFile.ImageFile):
+ format = "MSP"
+ format_description = "Windows Paint"
+
+ def _open(self):
+ # Header
+ s = self.fp.read(32)
+ if not _accept(s):
+ msg = "not an MSP file"
+ raise SyntaxError(msg)
+
+ # Header checksum
+ checksum = 0
+ for i in range(0, 32, 2):
+ checksum = checksum ^ i16(s, i)
+ if checksum != 0:
+ msg = "bad MSP checksum"
+ raise SyntaxError(msg)
+
+ self._mode = "1"
+ self._size = i16(s, 4), i16(s, 6)
+
+ if s[:4] == b"DanM":
+ self.tile = [("raw", (0, 0) + self.size, 32, ("1", 0, 1))]
+ else:
+ self.tile = [("MSP", (0, 0) + self.size, 32, None)]
+
+
+class MspDecoder(ImageFile.PyDecoder):
+ # The algo for the MSP decoder is from
+ # https://www.fileformat.info/format/mspaint/egff.htm
+    # cc-by-attribution -- That page references and takes its content from the
+ # Encyclopedia of Graphics File Formats and is licensed by
+ # O'Reilly under the Creative Common/Attribution license
+ #
+ # For RLE encoded files, the 32byte header is followed by a scan
+ # line map, encoded as one 16bit word of encoded byte length per
+ # line.
+ #
+ # NOTE: the encoded length of the line can be 0. This was not
+    # handled in the previous version of this decoder, and there's no
+ # mention of how to handle it in the documentation. From the few
+ # examples I've seen, I've assumed that it is a fill of the
+ # background color, in this case, white.
+ #
+ #
+ # Pseudocode of the decoder:
+ # Read a BYTE value as the RunType
+ # If the RunType value is zero
+ # Read next byte as the RunCount
+ # Read the next byte as the RunValue
+ # Write the RunValue byte RunCount times
+ # If the RunType value is non-zero
+ # Use this value as the RunCount
+ # Read and write the next RunCount bytes literally
+ #
+ # e.g.:
+ # 0x00 03 ff 05 00 01 02 03 04
+ # would yield the bytes:
+ # 0xff ff ff 00 01 02 03 04
+ #
+ # which are then interpreted as a bit packed mode '1' image
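+    #
+    # A hedged pure-Python sketch of the same expansion, for illustration
+    # only (the helper name is hypothetical, not part of this module):
+    #
+    #   def expand_msp_row(row: bytes) -> bytes:
+    #       out = bytearray()
+    #       idx = 0
+    #       while idx < len(row):
+    #           runtype = row[idx]
+    #           idx += 1
+    #           if runtype == 0:  # encoded run: count byte, then value byte
+    #               out += row[idx + 1 : idx + 2] * row[idx]
+    #               idx += 2
+    #           else:  # literal run of `runtype` bytes
+    #               out += row[idx : idx + runtype]
+    #               idx += runtype
+    #       return bytes(out)
+    #
+    #   expand_msp_row(bytes.fromhex("0003ff050001020304"))
+    #   # -> b'\xff\xff\xff\x00\x01\x02\x03\x04'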
+
+ _pulls_fd = True
+
+ def decode(self, buffer):
+ img = io.BytesIO()
+ blank_line = bytearray((0xFF,) * ((self.state.xsize + 7) // 8))
+ try:
+ self.fd.seek(32)
+ rowmap = struct.unpack_from(
+ f"<{self.state.ysize}H", self.fd.read(self.state.ysize * 2)
+ )
+ except struct.error as e:
+ msg = "Truncated MSP file in row map"
+ raise OSError(msg) from e
+
+ for x, rowlen in enumerate(rowmap):
+ try:
+ if rowlen == 0:
+ img.write(blank_line)
+ continue
+ row = self.fd.read(rowlen)
+ if len(row) != rowlen:
+ msg = f"Truncated MSP file, expected {rowlen} bytes on row {x}"
+ raise OSError(msg)
+ idx = 0
+ while idx < rowlen:
+ runtype = row[idx]
+ idx += 1
+ if runtype == 0:
+ (runcount, runval) = struct.unpack_from("Bc", row, idx)
+ img.write(runval * runcount)
+ idx += 2
+ else:
+ runcount = runtype
+ img.write(row[idx : idx + runcount])
+ idx += runcount
+
+ except struct.error as e:
+ msg = f"Corrupted MSP file in row {x}"
+ raise OSError(msg) from e
+
+ self.set_as_raw(img.getvalue(), ("1", 0, 1))
+
+ return -1, 0
+
+
+Image.register_decoder("MSP", MspDecoder)
+
+
+#
+# write MSP files (uncompressed only)
+
+
+def _save(im, fp, filename):
+ if im.mode != "1":
+ msg = f"cannot write mode {im.mode} as MSP"
+ raise OSError(msg)
+
+ # create MSP header
+ header = [0] * 16
+
+ header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1
+ header[2], header[3] = im.size
+ header[4], header[5] = 1, 1
+ header[6], header[7] = 1, 1
+ header[8], header[9] = im.size
+
+ checksum = 0
+ for h in header:
+ checksum = checksum ^ h
+ header[12] = checksum # FIXME: is this the right field?
+
+ # header
+ for h in header:
+ fp.write(o16(h))
+
+ # image body
+ ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 32, ("1", 0, 1))])
+
+
+#
+# registry
+
+Image.register_open(MspImageFile.format, MspImageFile, _accept)
+Image.register_save(MspImageFile.format, _save)
+
+Image.register_extension(MspImageFile.format, ".msp")
diff --git a/Lib/site-packages/PIL/PSDraw.py b/Lib/site-packages/PIL/PSDraw.py
new file mode 100644
index 0000000..848fc2f
--- /dev/null
+++ b/Lib/site-packages/PIL/PSDraw.py
@@ -0,0 +1,230 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# Simple PostScript graphics interface
+#
+# History:
+# 1996-04-20 fl Created
+# 1999-01-10 fl Added gsave/grestore to image method
+# 2005-05-04 fl Fixed floating point issue in image (from Eric Etheridge)
+#
+# Copyright (c) 1997-2005 by Secret Labs AB. All rights reserved.
+# Copyright (c) 1996 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import sys
+
+from . import EpsImagePlugin
+
+##
+# Simple PostScript graphics interface.
+
+
+class PSDraw:
+ """
+ Sets up printing to the given file. If ``fp`` is omitted,
+ ``sys.stdout.buffer`` or ``sys.stdout`` is assumed.
+ """
+
+ def __init__(self, fp=None):
+ if not fp:
+ try:
+ fp = sys.stdout.buffer
+ except AttributeError:
+ fp = sys.stdout
+ self.fp = fp
+
+ def begin_document(self, id=None):
+ """Set up printing of a document. (Write PostScript DSC header.)"""
+ # FIXME: incomplete
+ self.fp.write(
+ b"%!PS-Adobe-3.0\n"
+ b"save\n"
+ b"/showpage { } def\n"
+ b"%%EndComments\n"
+ b"%%BeginDocument\n"
+ )
+ # self.fp.write(ERROR_PS) # debugging!
+ self.fp.write(EDROFF_PS)
+ self.fp.write(VDI_PS)
+ self.fp.write(b"%%EndProlog\n")
+ self.isofont = {}
+
+ def end_document(self):
+ """Ends printing. (Write PostScript DSC footer.)"""
+ self.fp.write(b"%%EndDocument\nrestore showpage\n%%End\n")
+ if hasattr(self.fp, "flush"):
+ self.fp.flush()
+
+ def setfont(self, font, size):
+ """
+ Selects which font to use.
+
+ :param font: A PostScript font name
+ :param size: Size in points.
+ """
+ font = bytes(font, "UTF-8")
+ if font not in self.isofont:
+ # reencode font
+ self.fp.write(b"/PSDraw-%s ISOLatin1Encoding /%s E\n" % (font, font))
+ self.isofont[font] = 1
+ # rough
+ self.fp.write(b"/F0 %d /PSDraw-%s F\n" % (size, font))
+
+ def line(self, xy0, xy1):
+ """
+ Draws a line between the two points. Coordinates are given in
+ PostScript point coordinates (72 points per inch, (0, 0) is the lower
+ left corner of the page).
+ """
+ self.fp.write(b"%d %d %d %d Vl\n" % (*xy0, *xy1))
+
+ def rectangle(self, box):
+ """
+ Draws a rectangle.
+
+ :param box: A tuple of four integers, specifying left, bottom, width and
+ height.
+ """
+ self.fp.write(b"%d %d M 0 %d %d Vr\n" % box)
+
+ def text(self, xy, text):
+ """
+ Draws text at the given position. You must use
+ :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method.
+ """
+ text = bytes(text, "UTF-8")
+ text = b"\\(".join(text.split(b"("))
+ text = b"\\)".join(text.split(b")"))
+ xy += (text,)
+ self.fp.write(b"%d %d M (%s) S\n" % xy)
+
+ def image(self, box, im, dpi=None):
+ """Draw a PIL image, centered in the given box."""
+ # default resolution depends on mode
+ if not dpi:
+ if im.mode == "1":
+ dpi = 200 # fax
+ else:
+ dpi = 100 # grayscale
+ # image size (on paper)
+ x = im.size[0] * 72 / dpi
+ y = im.size[1] * 72 / dpi
+ # max allowed size
+ xmax = float(box[2] - box[0])
+ ymax = float(box[3] - box[1])
+ if x > xmax:
+ y = y * xmax / x
+ x = xmax
+ if y > ymax:
+ x = x * ymax / y
+ y = ymax
+ dx = (xmax - x) / 2 + box[0]
+ dy = (ymax - y) / 2 + box[1]
+ self.fp.write(b"gsave\n%f %f translate\n" % (dx, dy))
+ if (x, y) != im.size:
+ # EpsImagePlugin._save prints the image at (0,0,xsize,ysize)
+ sx = x / im.size[0]
+ sy = y / im.size[1]
+ self.fp.write(b"%f %f scale\n" % (sx, sy))
+ EpsImagePlugin._save(im, self.fp, None, 0)
+ self.fp.write(b"\ngrestore\n")
+
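+# A hedged usage sketch; the output path and text are illustrative:
+#
+#   from PIL import PSDraw
+#
+#   with open("out.ps", "wb") as fp:
+#       ps = PSDraw.PSDraw(fp)
+#       ps.begin_document()
+#       ps.setfont("Helvetica", 12)
+#       ps.text((72, 720), "Hello (PostScript) world")
+#       ps.rectangle((72, 72, 144, 144))  # left, bottom, width, height
+#       ps.end_document()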
+
+# --------------------------------------------------------------------
+# PostScript driver
+
+#
+# EDROFF.PS -- PostScript driver for Edroff 2
+#
+# History:
+# 94-01-25 fl: created (edroff 2.04)
+#
+# Copyright (c) Fredrik Lundh 1994.
+#
+
+
+EDROFF_PS = b"""\
+/S { show } bind def
+/P { moveto show } bind def
+/M { moveto } bind def
+/X { 0 rmoveto } bind def
+/Y { 0 exch rmoveto } bind def
+/E { findfont
+ dup maxlength dict begin
+ {
+ 1 index /FID ne { def } { pop pop } ifelse
+ } forall
+ /Encoding exch def
+ dup /FontName exch def
+ currentdict end definefont pop
+} bind def
+/F { findfont exch scalefont dup setfont
+ [ exch /setfont cvx ] cvx bind def
+} bind def
+"""
+
+#
+# VDI.PS -- PostScript driver for VDI meta commands
+#
+# History:
+# 94-01-25 fl: created (edroff 2.04)
+#
+# Copyright (c) Fredrik Lundh 1994.
+#
+
+VDI_PS = b"""\
+/Vm { moveto } bind def
+/Va { newpath arcn stroke } bind def
+/Vl { moveto lineto stroke } bind def
+/Vc { newpath 0 360 arc closepath } bind def
+/Vr { exch dup 0 rlineto
+ exch dup 0 exch rlineto
+ exch neg 0 rlineto
+ 0 exch neg rlineto
+ setgray fill } bind def
+/Tm matrix def
+/Ve { Tm currentmatrix pop
+ translate scale newpath 0 0 .5 0 360 arc closepath
+ Tm setmatrix
+} bind def
+/Vf { currentgray exch setgray fill setgray } bind def
+"""
+
+#
+# ERROR.PS -- Error handler
+#
+# History:
+# 89-11-21 fl: created (pslist 1.10)
+#
+
+ERROR_PS = b"""\
+/landscape false def
+/errorBUF 200 string def
+/errorNL { currentpoint 10 sub exch pop 72 exch moveto } def
+errordict begin /handleerror {
+ initmatrix /Courier findfont 10 scalefont setfont
+ newpath 72 720 moveto $error begin /newerror false def
+ (PostScript Error) show errorNL errorNL
+ (Error: ) show
+ /errorname load errorBUF cvs show errorNL errorNL
+ (Command: ) show
+ /command load dup type /stringtype ne { errorBUF cvs } if show
+ errorNL errorNL
+ (VMstatus: ) show
+ vmstatus errorBUF cvs show ( bytes available, ) show
+ errorBUF cvs show ( bytes used at level ) show
+ errorBUF cvs show errorNL errorNL
+    (Operand stack: ) show errorNL /ostack load {
+ dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
+ } forall errorNL
+    (Execution stack: ) show errorNL /estack load {
+ dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
+ } forall
+ end showpage
+} def end
+"""
diff --git a/Lib/site-packages/PIL/PaletteFile.py b/Lib/site-packages/PIL/PaletteFile.py
new file mode 100644
index 0000000..dc31754
--- /dev/null
+++ b/Lib/site-packages/PIL/PaletteFile.py
@@ -0,0 +1,52 @@
+#
+# Python Imaging Library
+# $Id$
+#
+# stuff to read simple, teragon-style palette files
+#
+# History:
+# 97-08-23 fl Created
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from ._binary import o8
+
+
+class PaletteFile:
+ """File handler for Teragon-style palette files."""
+
+ rawmode = "RGB"
+
+ def __init__(self, fp):
+        # default to a greyscale ramp, stored as packed bytes so that the
+        # join() below always works
+        self.palette = [o8(i) * 3 for i in range(256)]
+
+ while True:
+ s = fp.readline()
+
+ if not s:
+ break
+ if s[:1] == b"#":
+ continue
+ if len(s) > 100:
+ msg = "bad palette file"
+ raise SyntaxError(msg)
+
+ v = [int(x) for x in s.split()]
+ try:
+ [i, r, g, b] = v
+ except ValueError:
+ [i, r] = v
+ g = b = r
+
+ if 0 <= i <= 255:
+ self.palette[i] = o8(r) + o8(g) + o8(b)
+
+ self.palette = b"".join(self.palette)
+
+ def getpalette(self):
+ return self.palette, self.rawmode
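+
+
+# A hedged input/usage sketch; the palette text is made up. Each data line is
+# "index R G B" or "index grey", and "#" lines are comments:
+#
+#   import io
+#   pf = PaletteFile(io.BytesIO(b"# demo\n0 255 0 0\n1 128\n"))
+#   data, rawmode = pf.getpalette()  # 768 bytes of packed RGB, "RGB"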
diff --git a/Lib/site-packages/PIL/PalmImagePlugin.py b/Lib/site-packages/PIL/PalmImagePlugin.py
new file mode 100644
index 0000000..65be7fe
--- /dev/null
+++ b/Lib/site-packages/PIL/PalmImagePlugin.py
@@ -0,0 +1,226 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+
+##
+# Image plugin for Palm pixmap images (output only).
+##
+from __future__ import annotations
+
+from . import Image, ImageFile
+from ._binary import o8
+from ._binary import o16be as o16b
+
+# fmt: off
+_Palm8BitColormapValues = (
+ (255, 255, 255), (255, 204, 255), (255, 153, 255), (255, 102, 255),
+ (255, 51, 255), (255, 0, 255), (255, 255, 204), (255, 204, 204),
+ (255, 153, 204), (255, 102, 204), (255, 51, 204), (255, 0, 204),
+ (255, 255, 153), (255, 204, 153), (255, 153, 153), (255, 102, 153),
+ (255, 51, 153), (255, 0, 153), (204, 255, 255), (204, 204, 255),
+ (204, 153, 255), (204, 102, 255), (204, 51, 255), (204, 0, 255),
+ (204, 255, 204), (204, 204, 204), (204, 153, 204), (204, 102, 204),
+ (204, 51, 204), (204, 0, 204), (204, 255, 153), (204, 204, 153),
+ (204, 153, 153), (204, 102, 153), (204, 51, 153), (204, 0, 153),
+ (153, 255, 255), (153, 204, 255), (153, 153, 255), (153, 102, 255),
+ (153, 51, 255), (153, 0, 255), (153, 255, 204), (153, 204, 204),
+ (153, 153, 204), (153, 102, 204), (153, 51, 204), (153, 0, 204),
+ (153, 255, 153), (153, 204, 153), (153, 153, 153), (153, 102, 153),
+ (153, 51, 153), (153, 0, 153), (102, 255, 255), (102, 204, 255),
+ (102, 153, 255), (102, 102, 255), (102, 51, 255), (102, 0, 255),
+ (102, 255, 204), (102, 204, 204), (102, 153, 204), (102, 102, 204),
+ (102, 51, 204), (102, 0, 204), (102, 255, 153), (102, 204, 153),
+ (102, 153, 153), (102, 102, 153), (102, 51, 153), (102, 0, 153),
+ (51, 255, 255), (51, 204, 255), (51, 153, 255), (51, 102, 255),
+ (51, 51, 255), (51, 0, 255), (51, 255, 204), (51, 204, 204),
+ (51, 153, 204), (51, 102, 204), (51, 51, 204), (51, 0, 204),
+ (51, 255, 153), (51, 204, 153), (51, 153, 153), (51, 102, 153),
+ (51, 51, 153), (51, 0, 153), (0, 255, 255), (0, 204, 255),
+ (0, 153, 255), (0, 102, 255), (0, 51, 255), (0, 0, 255),
+ (0, 255, 204), (0, 204, 204), (0, 153, 204), (0, 102, 204),
+ (0, 51, 204), (0, 0, 204), (0, 255, 153), (0, 204, 153),
+ (0, 153, 153), (0, 102, 153), (0, 51, 153), (0, 0, 153),
+ (255, 255, 102), (255, 204, 102), (255, 153, 102), (255, 102, 102),
+ (255, 51, 102), (255, 0, 102), (255, 255, 51), (255, 204, 51),
+ (255, 153, 51), (255, 102, 51), (255, 51, 51), (255, 0, 51),
+ (255, 255, 0), (255, 204, 0), (255, 153, 0), (255, 102, 0),
+ (255, 51, 0), (255, 0, 0), (204, 255, 102), (204, 204, 102),
+ (204, 153, 102), (204, 102, 102), (204, 51, 102), (204, 0, 102),
+ (204, 255, 51), (204, 204, 51), (204, 153, 51), (204, 102, 51),
+ (204, 51, 51), (204, 0, 51), (204, 255, 0), (204, 204, 0),
+ (204, 153, 0), (204, 102, 0), (204, 51, 0), (204, 0, 0),
+ (153, 255, 102), (153, 204, 102), (153, 153, 102), (153, 102, 102),
+ (153, 51, 102), (153, 0, 102), (153, 255, 51), (153, 204, 51),
+ (153, 153, 51), (153, 102, 51), (153, 51, 51), (153, 0, 51),
+ (153, 255, 0), (153, 204, 0), (153, 153, 0), (153, 102, 0),
+ (153, 51, 0), (153, 0, 0), (102, 255, 102), (102, 204, 102),
+ (102, 153, 102), (102, 102, 102), (102, 51, 102), (102, 0, 102),
+ (102, 255, 51), (102, 204, 51), (102, 153, 51), (102, 102, 51),
+ (102, 51, 51), (102, 0, 51), (102, 255, 0), (102, 204, 0),
+ (102, 153, 0), (102, 102, 0), (102, 51, 0), (102, 0, 0),
+ (51, 255, 102), (51, 204, 102), (51, 153, 102), (51, 102, 102),
+ (51, 51, 102), (51, 0, 102), (51, 255, 51), (51, 204, 51),
+ (51, 153, 51), (51, 102, 51), (51, 51, 51), (51, 0, 51),
+ (51, 255, 0), (51, 204, 0), (51, 153, 0), (51, 102, 0),
+ (51, 51, 0), (51, 0, 0), (0, 255, 102), (0, 204, 102),
+ (0, 153, 102), (0, 102, 102), (0, 51, 102), (0, 0, 102),
+ (0, 255, 51), (0, 204, 51), (0, 153, 51), (0, 102, 51),
+ (0, 51, 51), (0, 0, 51), (0, 255, 0), (0, 204, 0),
+ (0, 153, 0), (0, 102, 0), (0, 51, 0), (17, 17, 17),
+ (34, 34, 34), (68, 68, 68), (85, 85, 85), (119, 119, 119),
+ (136, 136, 136), (170, 170, 170), (187, 187, 187), (221, 221, 221),
+ (238, 238, 238), (192, 192, 192), (128, 0, 0), (128, 0, 128),
+ (0, 128, 0), (0, 128, 128), (0, 0, 0), (0, 0, 0),
+ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0),
+ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0),
+ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0),
+ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0),
+ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0),
+ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0))
+# fmt: on
+
+
+# so build a prototype image to be used for palette resampling
+def build_prototype_image():
+ image = Image.new("L", (1, len(_Palm8BitColormapValues)))
+ image.putdata(list(range(len(_Palm8BitColormapValues))))
+ palettedata = ()
+ for colormapValue in _Palm8BitColormapValues:
+ palettedata += colormapValue
+ palettedata += (0, 0, 0) * (256 - len(_Palm8BitColormapValues))
+ image.putpalette(palettedata)
+ return image
+
+
+Palm8BitColormapImage = build_prototype_image()
+
+# OK, we now have in Palm8BitColormapImage,
+# a "P"-mode image with the right palette
+#
+# --------------------------------------------------------------------
+
+_FLAGS = {"custom-colormap": 0x4000, "is-compressed": 0x8000, "has-transparent": 0x2000}
+
+_COMPRESSION_TYPES = {"none": 0xFF, "rle": 0x01, "scanline": 0x00}
+
+
+#
+# --------------------------------------------------------------------
+
+##
+# (Internal) Image save plugin for the Palm format.
+
+
+def _save(im, fp, filename):
+ if im.mode == "P":
+ # we assume this is a color Palm image with the standard colormap,
+ # unless the "info" dict has a "custom-colormap" field
+
+ rawmode = "P"
+ bpp = 8
+ version = 1
+
+ elif im.mode == "L":
+ if im.encoderinfo.get("bpp") in (1, 2, 4):
+ # this is 8-bit grayscale, so we shift it to get the high-order bits,
+ # and invert it because
+ # Palm does grayscale from white (0) to black (1)
+ bpp = im.encoderinfo["bpp"]
+ im = im.point(
+ lambda x, shift=8 - bpp, maxval=(1 << bpp) - 1: maxval - (x >> shift)
+ )
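+            # e.g. with bpp=4: an 8-bit pixel of 255 maps to 0 (white) and
+            # 0 maps to 15 (black), matching Palm's inverted grey ramp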
+ elif im.info.get("bpp") in (1, 2, 4):
+ # here we assume that even though the inherent mode is 8-bit grayscale,
+ # only the lower bpp bits are significant.
+ # We invert them to match the Palm.
+ bpp = im.info["bpp"]
+ im = im.point(lambda x, maxval=(1 << bpp) - 1: maxval - (x & maxval))
+ else:
+ msg = f"cannot write mode {im.mode} as Palm"
+ raise OSError(msg)
+
+ # we ignore the palette here
+ im.mode = "P"
+ rawmode = "P;" + str(bpp)
+ version = 1
+
+ elif im.mode == "1":
+ # monochrome -- write it inverted, as is the Palm standard
+ rawmode = "1;I"
+ bpp = 1
+ version = 0
+
+ else:
+ msg = f"cannot write mode {im.mode} as Palm"
+ raise OSError(msg)
+
+ #
+ # make sure image data is available
+ im.load()
+
+ # write header
+
+ cols = im.size[0]
+ rows = im.size[1]
+
+ rowbytes = int((cols + (16 // bpp - 1)) / (16 // bpp)) * 2
+ transparent_index = 0
+ compression_type = _COMPRESSION_TYPES["none"]
+
+ flags = 0
+ if im.mode == "P" and "custom-colormap" in im.info:
+ flags = flags & _FLAGS["custom-colormap"]
+ colormapsize = 4 * 256 + 2
+ colormapmode = im.palette.mode
+ colormap = im.getdata().getpalette()
+ else:
+ colormapsize = 0
+
+ if "offset" in im.info:
+ offset = (rowbytes * rows + 16 + 3 + colormapsize) // 4
+ else:
+ offset = 0
+
+ fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags))
+ fp.write(o8(bpp))
+ fp.write(o8(version))
+ fp.write(o16b(offset))
+ fp.write(o8(transparent_index))
+ fp.write(o8(compression_type))
+ fp.write(o16b(0)) # reserved by Palm
+
+ # now write colormap if necessary
+
+ if colormapsize > 0:
+ fp.write(o16b(256))
+ for i in range(256):
+ fp.write(o8(i))
+ if colormapmode == "RGB":
+ fp.write(
+ o8(colormap[3 * i])
+ + o8(colormap[3 * i + 1])
+ + o8(colormap[3 * i + 2])
+ )
+ elif colormapmode == "RGBA":
+ fp.write(
+ o8(colormap[4 * i])
+ + o8(colormap[4 * i + 1])
+ + o8(colormap[4 * i + 2])
+ )
+
+ # now convert data to raw form
+ ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, rowbytes, 1))])
+
+ if hasattr(fp, "flush"):
+ fp.flush()
+
+
+#
+# --------------------------------------------------------------------
+
+Image.register_save("Palm", _save)
+
+Image.register_extension("Palm", ".palm")
+
+Image.register_mime("Palm", "image/palm")
diff --git a/Lib/site-packages/PIL/PcdImagePlugin.py b/Lib/site-packages/PIL/PcdImagePlugin.py
new file mode 100644
index 0000000..a0515b3
--- /dev/null
+++ b/Lib/site-packages/PIL/PcdImagePlugin.py
@@ -0,0 +1,62 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PCD file handling
+#
+# History:
+# 96-05-10 fl Created
+# 96-05-27 fl Added draft mode (128x192, 256x384)
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from . import Image, ImageFile
+
+##
+# Image plugin for PhotoCD images. This plugin only reads the 768x512
+# image from the file; higher resolutions are encoded in a proprietary
+# encoding.
+
+
+class PcdImageFile(ImageFile.ImageFile):
+ format = "PCD"
+ format_description = "Kodak PhotoCD"
+
+ def _open(self):
+ # rough
+ self.fp.seek(2048)
+ s = self.fp.read(2048)
+
+ if s[:4] != b"PCD_":
+ msg = "not a PCD file"
+ raise SyntaxError(msg)
+
+ orientation = s[1538] & 3
+ self.tile_post_rotate = None
+ if orientation == 1:
+ self.tile_post_rotate = 90
+ elif orientation == 3:
+ self.tile_post_rotate = -90
+
+ self._mode = "RGB"
+ self._size = 768, 512 # FIXME: not correct for rotated images!
+ self.tile = [("pcd", (0, 0) + self.size, 96 * 2048, None)]
+
+ def load_end(self):
+ if self.tile_post_rotate:
+ # Handle rotated PCDs
+ self.im = self.im.rotate(self.tile_post_rotate)
+ self._size = self.im.size
+
+
+#
+# registry
+
+Image.register_open(PcdImageFile.format, PcdImageFile)
+
+Image.register_extension(PcdImageFile.format, ".pcd")
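+
+# Usage sketch (illustrative; "photo.pcd" is a hypothetical file):
+#
+#     from PIL import Image
+#     im = Image.open("photo.pcd")  # always decodes the 768x512 base image
+#     im.load()                     # load_end() then applies any rotation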
diff --git a/Lib/site-packages/PIL/PcfFontFile.py b/Lib/site-packages/PIL/PcfFontFile.py
new file mode 100644
index 0000000..0d1968b
--- /dev/null
+++ b/Lib/site-packages/PIL/PcfFontFile.py
@@ -0,0 +1,254 @@
+#
+# THIS IS WORK IN PROGRESS
+#
+# The Python Imaging Library
+# $Id$
+#
+# portable compiled font file parser
+#
+# history:
+# 1997-08-19 fl created
+# 2003-09-13 fl fixed loading of unicode fonts
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1997-2003 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import io
+from typing import BinaryIO, Callable
+
+from . import FontFile, Image
+from ._binary import i8
+from ._binary import i16be as b16
+from ._binary import i16le as l16
+from ._binary import i32be as b32
+from ._binary import i32le as l32
+
+# --------------------------------------------------------------------
+# declarations
+
+PCF_MAGIC = 0x70636601 # "\x01fcp"
+
+PCF_PROPERTIES = 1 << 0
+PCF_ACCELERATORS = 1 << 1
+PCF_METRICS = 1 << 2
+PCF_BITMAPS = 1 << 3
+PCF_INK_METRICS = 1 << 4
+PCF_BDF_ENCODINGS = 1 << 5
+PCF_SWIDTHS = 1 << 6
+PCF_GLYPH_NAMES = 1 << 7
+PCF_BDF_ACCELERATORS = 1 << 8
+
+BYTES_PER_ROW: list[Callable[[int], int]] = [
+ lambda bits: ((bits + 7) >> 3),
+ lambda bits: ((bits + 15) >> 3) & ~1,
+ lambda bits: ((bits + 31) >> 3) & ~3,
+ lambda bits: ((bits + 63) >> 3) & ~7,
+]
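+# For example, BYTES_PER_ROW[1] pads a row to a 2-byte boundary: a
+# 10-pixel-wide glyph takes ((10 + 15) >> 3) & ~1 == 2 bytes per row.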
+
+
+def sz(s: bytes, o: int) -> bytes:
+ return s[o : s.index(b"\0", o)]
+
+
+class PcfFontFile(FontFile.FontFile):
+ """Font file plugin for the X11 PCF format."""
+
+ name = "name"
+
+ def __init__(self, fp: BinaryIO, charset_encoding: str = "iso8859-1"):
+ self.charset_encoding = charset_encoding
+
+ magic = l32(fp.read(4))
+ if magic != PCF_MAGIC:
+ msg = "not a PCF file"
+ raise SyntaxError(msg)
+
+ super().__init__()
+
+ count = l32(fp.read(4))
+ self.toc = {}
+ for i in range(count):
+ type = l32(fp.read(4))
+ self.toc[type] = l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4))
+
+ self.fp = fp
+
+ self.info = self._load_properties()
+
+ metrics = self._load_metrics()
+ bitmaps = self._load_bitmaps(metrics)
+ encoding = self._load_encoding()
+
+ #
+ # create glyph structure
+
+ for ch, ix in enumerate(encoding):
+ if ix is not None:
+ (
+ xsize,
+ ysize,
+ left,
+ right,
+ width,
+ ascent,
+ descent,
+ attributes,
+ ) = metrics[ix]
+ self.glyph[ch] = (
+ (width, 0),
+ (left, descent - ysize, xsize + left, descent),
+ (0, 0, xsize, ysize),
+ bitmaps[ix],
+ )
+
+ def _getformat(
+ self, tag: int
+ ) -> tuple[BinaryIO, int, Callable[[bytes], int], Callable[[bytes], int]]:
+ format, size, offset = self.toc[tag]
+
+ fp = self.fp
+ fp.seek(offset)
+
+ format = l32(fp.read(4))
+
+ if format & 4:
+ i16, i32 = b16, b32
+ else:
+ i16, i32 = l16, l32
+
+ return fp, format, i16, i32
+
+ def _load_properties(self) -> dict[bytes, bytes | int]:
+ #
+ # font properties
+
+ properties = {}
+
+ fp, format, i16, i32 = self._getformat(PCF_PROPERTIES)
+
+ nprops = i32(fp.read(4))
+
+ # read property description
+ p = [(i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4))) for _ in range(nprops)]
+
+ if nprops & 3:
+ fp.seek(4 - (nprops & 3), io.SEEK_CUR) # pad
+
+ data = fp.read(i32(fp.read(4)))
+
+ for k, s, v in p:
+ property_value: bytes | int = sz(data, v) if s else v
+ properties[sz(data, k)] = property_value
+
+ return properties
+
+ def _load_metrics(self) -> list[tuple[int, int, int, int, int, int, int, int]]:
+ #
+ # font metrics
+
+ metrics: list[tuple[int, int, int, int, int, int, int, int]] = []
+
+ fp, format, i16, i32 = self._getformat(PCF_METRICS)
+
+ append = metrics.append
+
+ if (format & 0xFF00) == 0x100:
+ # "compressed" metrics
+ for i in range(i16(fp.read(2))):
+ left = i8(fp.read(1)) - 128
+ right = i8(fp.read(1)) - 128
+ width = i8(fp.read(1)) - 128
+ ascent = i8(fp.read(1)) - 128
+ descent = i8(fp.read(1)) - 128
+ xsize = right - left
+ ysize = ascent + descent
+ append((xsize, ysize, left, right, width, ascent, descent, 0))
+
+ else:
+ # "jumbo" metrics
+ for i in range(i32(fp.read(4))):
+ left = i16(fp.read(2))
+ right = i16(fp.read(2))
+ width = i16(fp.read(2))
+ ascent = i16(fp.read(2))
+ descent = i16(fp.read(2))
+ attributes = i16(fp.read(2))
+ xsize = right - left
+ ysize = ascent + descent
+ append((xsize, ysize, left, right, width, ascent, descent, attributes))
+
+ return metrics
+
+ def _load_bitmaps(
+ self, metrics: list[tuple[int, int, int, int, int, int, int, int]]
+ ) -> list[Image.Image]:
+ #
+ # bitmap data
+
+ fp, format, i16, i32 = self._getformat(PCF_BITMAPS)
+
+ nbitmaps = i32(fp.read(4))
+
+ if nbitmaps != len(metrics):
+ msg = "Wrong number of bitmaps"
+ raise OSError(msg)
+
+ offsets = [i32(fp.read(4)) for _ in range(nbitmaps)]
+
+ bitmap_sizes = [i32(fp.read(4)) for _ in range(4)]
+
+ # byteorder = format & 4 # non-zero => MSB
+ bitorder = format & 8 # non-zero => MSB
+ padindex = format & 3
+
+ bitmapsize = bitmap_sizes[padindex]
+ offsets.append(bitmapsize)
+
+ data = fp.read(bitmapsize)
+
+ pad = BYTES_PER_ROW[padindex]
+ mode = "1;R"
+ if bitorder:
+ mode = "1"
+
+ bitmaps = []
+ for i in range(nbitmaps):
+ xsize, ysize = metrics[i][:2]
+ b, e = offsets[i : i + 2]
+ bitmaps.append(
+ Image.frombytes("1", (xsize, ysize), data[b:e], "raw", mode, pad(xsize))
+ )
+
+ return bitmaps
+
+ def _load_encoding(self) -> list[int | None]:
+ fp, format, i16, i32 = self._getformat(PCF_BDF_ENCODINGS)
+
+ first_col, last_col = i16(fp.read(2)), i16(fp.read(2))
+ first_row, last_row = i16(fp.read(2)), i16(fp.read(2))
+
+ i16(fp.read(2)) # default
+
+ nencoding = (last_col - first_col + 1) * (last_row - first_row + 1)
+
+ # map character code to bitmap index
+ encoding: list[int | None] = [None] * min(256, nencoding)
+
+ encoding_offsets = [i16(fp.read(2)) for _ in range(nencoding)]
+
+ for i in range(first_col, len(encoding)):
+ try:
+ encoding_offset = encoding_offsets[
+ ord(bytearray([i]).decode(self.charset_encoding))
+ ]
+ if encoding_offset != 0xFFFF:
+ encoding[i] = encoding_offset
+ except UnicodeDecodeError:
+ # character is not supported in selected encoding
+ pass
+
+ return encoding
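+
+
+# Usage sketch (illustrative; "font.pcf" is a hypothetical file):
+#
+#     with open("font.pcf", "rb") as fp:
+#         font = PcfFontFile(fp)
+#     font.save("font")  # FontFile.save writes the .pil/.pbm pair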
diff --git a/Lib/site-packages/PIL/PcxImagePlugin.py b/Lib/site-packages/PIL/PcxImagePlugin.py
new file mode 100644
index 0000000..98ecefd
--- /dev/null
+++ b/Lib/site-packages/PIL/PcxImagePlugin.py
@@ -0,0 +1,222 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PCX file handling
+#
+# This format was originally used by ZSoft's popular PaintBrush
+# program for the IBM PC. It is also supported by many MS-DOS and
+# Windows applications, including the Windows PaintBrush program in
+# Windows 3.
+#
+# history:
+# 1995-09-01 fl Created
+# 1996-05-20 fl Fixed RGB support
+# 1997-01-03 fl Fixed 2-bit and 4-bit support
+# 1999-02-03 fl Fixed 8-bit support (broken in 1.0b1)
+# 1999-02-07 fl Added write support
+# 2002-06-09 fl Made 2-bit and 4-bit support a bit more robust
+# 2002-07-30 fl   Seek from current position, not beginning of file
+# 2003-06-03 fl Extract DPI settings (info["dpi"])
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1995-2003 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import io
+import logging
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import i16le as i16
+from ._binary import o8
+from ._binary import o16le as o16
+
+logger = logging.getLogger(__name__)
+
+
+def _accept(prefix):
+ return prefix[0] == 10 and prefix[1] in [0, 2, 3, 5]
+
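+# For example, byte 0 is the ZSoft manufacturer tag (0x0A) and byte 1 the
+# format version, so _accept(b"\x0a\x05" + b"\x00" * 126) is true while
+# _accept(b"\x00" * 128) is false.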
+
+##
+# Image plugin for Paintbrush images.
+
+
+class PcxImageFile(ImageFile.ImageFile):
+ format = "PCX"
+ format_description = "Paintbrush"
+
+ def _open(self):
+ # header
+ s = self.fp.read(128)
+ if not _accept(s):
+ msg = "not a PCX file"
+ raise SyntaxError(msg)
+
+ # image
+ bbox = i16(s, 4), i16(s, 6), i16(s, 8) + 1, i16(s, 10) + 1
+ if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]:
+ msg = "bad PCX image size"
+ raise SyntaxError(msg)
+ logger.debug("BBox: %s %s %s %s", *bbox)
+
+ # format
+ version = s[1]
+ bits = s[3]
+ planes = s[65]
+ provided_stride = i16(s, 66)
+ logger.debug(
+ "PCX version %s, bits %s, planes %s, stride %s",
+ version,
+ bits,
+ planes,
+ provided_stride,
+ )
+
+ self.info["dpi"] = i16(s, 12), i16(s, 14)
+
+ if bits == 1 and planes == 1:
+ mode = rawmode = "1"
+
+ elif bits == 1 and planes in (2, 4):
+ mode = "P"
+ rawmode = "P;%dL" % planes
+ self.palette = ImagePalette.raw("RGB", s[16:64])
+
+ elif version == 5 and bits == 8 and planes == 1:
+ mode = rawmode = "L"
+ # FIXME: hey, this doesn't work with the incremental loader !!!
+ self.fp.seek(-769, io.SEEK_END)
+ s = self.fp.read(769)
+ if len(s) == 769 and s[0] == 12:
+ # check if the palette is linear grayscale
+ for i in range(256):
+ if s[i * 3 + 1 : i * 3 + 4] != o8(i) * 3:
+ mode = rawmode = "P"
+ break
+ if mode == "P":
+ self.palette = ImagePalette.raw("RGB", s[1:])
+ self.fp.seek(128)
+
+ elif version == 5 and bits == 8 and planes == 3:
+ mode = "RGB"
+ rawmode = "RGB;L"
+
+ else:
+ msg = "unknown PCX mode"
+ raise OSError(msg)
+
+ self._mode = mode
+ self._size = bbox[2] - bbox[0], bbox[3] - bbox[1]
+
+ # Don't trust the passed in stride.
+ # Calculate the approximate position for ourselves.
+ # CVE-2020-35653
+ stride = (self._size[0] * bits + 7) // 8
+
+ # While the specification states that this must be even,
+ # not all images follow this
+ if provided_stride != stride:
+ stride += stride % 2
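+        # e.g. a 321-pixel-wide 8-bit plane computes (321 * 8 + 7) // 8 = 321,
+        # which is rounded up to the even value 322 whenever the header's
+        # stride disagrees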
+
+ bbox = (0, 0) + self.size
+ logger.debug("size: %sx%s", *self.size)
+
+ self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))]
+
+
+# --------------------------------------------------------------------
+# save PCX files
+
+
+SAVE = {
+ # mode: (version, bits, planes, raw mode)
+ "1": (2, 1, 1, "1"),
+ "L": (5, 8, 1, "L"),
+ "P": (5, 8, 1, "P"),
+ "RGB": (5, 8, 3, "RGB;L"),
+}
+
+
+def _save(im, fp, filename):
+ try:
+ version, bits, planes, rawmode = SAVE[im.mode]
+ except KeyError as e:
+ msg = f"Cannot save {im.mode} images as PCX"
+ raise ValueError(msg) from e
+
+ # bytes per plane
+ stride = (im.size[0] * bits + 7) // 8
+ # stride should be even
+ stride += stride % 2
+ # Stride needs to be kept in sync with the PcxEncode.c version.
+    # Ideally it should be passed in via the state, but the bytes value
+ # gets overwritten.
+
+ logger.debug(
+ "PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d",
+ im.size[0],
+ bits,
+ stride,
+ )
+
+ # under windows, we could determine the current screen size with
+ # "Image.core.display_mode()[1]", but I think that's overkill...
+
+ screen = im.size
+
+ dpi = 100, 100
+
+ # PCX header
+ fp.write(
+ o8(10)
+ + o8(version)
+ + o8(1)
+ + o8(bits)
+ + o16(0)
+ + o16(0)
+ + o16(im.size[0] - 1)
+ + o16(im.size[1] - 1)
+ + o16(dpi[0])
+ + o16(dpi[1])
+ + b"\0" * 24
+ + b"\xFF" * 24
+ + b"\0"
+ + o8(planes)
+ + o16(stride)
+ + o16(1)
+ + o16(screen[0])
+ + o16(screen[1])
+ + b"\0" * 54
+ )
+
+ assert fp.tell() == 128
+
+ ImageFile._save(im, fp, [("pcx", (0, 0) + im.size, 0, (rawmode, bits * planes))])
+
+ if im.mode == "P":
+ # colour palette
+ fp.write(o8(12))
+ palette = im.im.getpalette("RGB", "RGB")
+ palette += b"\x00" * (768 - len(palette))
+ fp.write(palette) # 768 bytes
+ elif im.mode == "L":
+ # grayscale palette
+ fp.write(o8(12))
+ for i in range(256):
+ fp.write(o8(i) * 3)
+
+
+# --------------------------------------------------------------------
+# registry
+
+
+Image.register_open(PcxImageFile.format, PcxImageFile, _accept)
+Image.register_save(PcxImageFile.format, _save)
+
+Image.register_extension(PcxImageFile.format, ".pcx")
+
+Image.register_mime(PcxImageFile.format, "image/x-pcx")
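+
+# Usage sketch (illustrative; the filename is hypothetical):
+#
+#     from PIL import Image
+#     Image.new("RGB", (64, 64), "red").save("swatch.pcx")
+#     assert Image.open("swatch.pcx").mode == "RGB"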
diff --git a/Lib/site-packages/PIL/PdfImagePlugin.py b/Lib/site-packages/PIL/PdfImagePlugin.py
new file mode 100644
index 0000000..3506aad
--- /dev/null
+++ b/Lib/site-packages/PIL/PdfImagePlugin.py
@@ -0,0 +1,303 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PDF (Acrobat) file handling
+#
+# History:
+# 1996-07-16 fl Created
+# 1997-01-18 fl Fixed header
+# 2004-02-21 fl Fixes for 1/L/CMYK images, etc.
+# 2004-02-24 fl Fixes for 1 and P images.
+#
+# Copyright (c) 1997-2004 by Secret Labs AB. All rights reserved.
+# Copyright (c) 1996-1997 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+##
+# Image plugin for PDF images (output only).
+##
+from __future__ import annotations
+
+import io
+import math
+import os
+import time
+
+from . import Image, ImageFile, ImageSequence, PdfParser, __version__, features
+
+#
+# --------------------------------------------------------------------
+
+# object ids:
+# 1. catalogue
+# 2. pages
+# 3. image
+# 4. page
+# 5. page contents
+
+
+def _save_all(im, fp, filename):
+ _save(im, fp, filename, save_all=True)
+
+
+##
+# (Internal) Image save plugin for the PDF format.
+
+
+def _write_image(im, filename, existing_pdf, image_refs):
+ # FIXME: Should replace ASCIIHexDecode with RunLengthDecode
+ # (packbits) or LZWDecode (tiff/lzw compression). Note that
+ # PDF 1.2 also supports Flatedecode (zip compression).
+
+ params = None
+ decode = None
+
+ #
+ # Get image characteristics
+
+ width, height = im.size
+
+ dict_obj = {"BitsPerComponent": 8}
+ if im.mode == "1":
+ if features.check("libtiff"):
+ filter = "CCITTFaxDecode"
+ dict_obj["BitsPerComponent"] = 1
+ params = PdfParser.PdfArray(
+ [
+ PdfParser.PdfDict(
+ {
+ "K": -1,
+ "BlackIs1": True,
+ "Columns": width,
+ "Rows": height,
+ }
+ )
+ ]
+ )
+ else:
+ filter = "DCTDecode"
+ dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceGray")
+ procset = "ImageB" # grayscale
+ elif im.mode == "L":
+ filter = "DCTDecode"
+ # params = f"<< /Predictor 15 /Columns {width-2} >>"
+ dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceGray")
+ procset = "ImageB" # grayscale
+ elif im.mode == "LA":
+ filter = "JPXDecode"
+ # params = f"<< /Predictor 15 /Columns {width-2} >>"
+ procset = "ImageB" # grayscale
+ dict_obj["SMaskInData"] = 1
+ elif im.mode == "P":
+ filter = "ASCIIHexDecode"
+ palette = im.getpalette()
+ dict_obj["ColorSpace"] = [
+ PdfParser.PdfName("Indexed"),
+ PdfParser.PdfName("DeviceRGB"),
+ len(palette) // 3 - 1,
+ PdfParser.PdfBinary(palette),
+ ]
+ procset = "ImageI" # indexed color
+
+ if "transparency" in im.info:
+ smask = im.convert("LA").getchannel("A")
+ smask.encoderinfo = {}
+
+ image_ref = _write_image(smask, filename, existing_pdf, image_refs)[0]
+ dict_obj["SMask"] = image_ref
+ elif im.mode == "RGB":
+ filter = "DCTDecode"
+ dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceRGB")
+ procset = "ImageC" # color images
+ elif im.mode == "RGBA":
+ filter = "JPXDecode"
+ procset = "ImageC" # color images
+ dict_obj["SMaskInData"] = 1
+ elif im.mode == "CMYK":
+ filter = "DCTDecode"
+ dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceCMYK")
+ procset = "ImageC" # color images
+ decode = [1, 0, 1, 0, 1, 0, 1, 0]
+ else:
+ msg = f"cannot save mode {im.mode}"
+ raise ValueError(msg)
+
+ #
+ # image
+
+ op = io.BytesIO()
+
+ if filter == "ASCIIHexDecode":
+ ImageFile._save(im, op, [("hex", (0, 0) + im.size, 0, im.mode)])
+ elif filter == "CCITTFaxDecode":
+ im.save(
+ op,
+ "TIFF",
+ compression="group4",
+ # use a single strip
+ strip_size=math.ceil(width / 8) * height,
+ )
+ elif filter == "DCTDecode":
+ Image.SAVE["JPEG"](im, op, filename)
+ elif filter == "JPXDecode":
+ del dict_obj["BitsPerComponent"]
+ Image.SAVE["JPEG2000"](im, op, filename)
+ else:
+ msg = f"unsupported PDF filter ({filter})"
+ raise ValueError(msg)
+
+ stream = op.getvalue()
+ if filter == "CCITTFaxDecode":
+ stream = stream[8:]
+ filter = PdfParser.PdfArray([PdfParser.PdfName(filter)])
+ else:
+ filter = PdfParser.PdfName(filter)
+
+ image_ref = image_refs.pop(0)
+ existing_pdf.write_obj(
+ image_ref,
+ stream=stream,
+ Type=PdfParser.PdfName("XObject"),
+ Subtype=PdfParser.PdfName("Image"),
+ Width=width, # * 72.0 / x_resolution,
+ Height=height, # * 72.0 / y_resolution,
+ Filter=filter,
+ Decode=decode,
+ DecodeParms=params,
+ **dict_obj,
+ )
+
+ return image_ref, procset
+
+
+def _save(im, fp, filename, save_all=False):
+ is_appending = im.encoderinfo.get("append", False)
+ if is_appending:
+ existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="r+b")
+ else:
+ existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="w+b")
+
+ dpi = im.encoderinfo.get("dpi")
+ if dpi:
+ x_resolution = dpi[0]
+ y_resolution = dpi[1]
+ else:
+ x_resolution = y_resolution = im.encoderinfo.get("resolution", 72.0)
+
+ info = {
+ "title": None
+ if is_appending
+ else os.path.splitext(os.path.basename(filename))[0],
+ "author": None,
+ "subject": None,
+ "keywords": None,
+ "creator": None,
+ "producer": None,
+ "creationDate": None if is_appending else time.gmtime(),
+ "modDate": None if is_appending else time.gmtime(),
+ }
+ for k, default in info.items():
+ v = im.encoderinfo.get(k) if k in im.encoderinfo else default
+ if v:
+ existing_pdf.info[k[0].upper() + k[1:]] = v
+
+ #
+ # make sure image data is available
+ im.load()
+
+ existing_pdf.start_writing()
+ existing_pdf.write_header()
+ existing_pdf.write_comment(f"created by Pillow {__version__} PDF driver")
+
+ #
+ # pages
+ ims = [im]
+ if save_all:
+ append_images = im.encoderinfo.get("append_images", [])
+ for append_im in append_images:
+ append_im.encoderinfo = im.encoderinfo.copy()
+ ims.append(append_im)
+ number_of_pages = 0
+ image_refs = []
+ page_refs = []
+ contents_refs = []
+ for im in ims:
+ im_number_of_pages = 1
+ if save_all:
+ try:
+ im_number_of_pages = im.n_frames
+ except AttributeError:
+ # Image format does not have n_frames.
+ # It is a single frame image
+ pass
+ number_of_pages += im_number_of_pages
+ for i in range(im_number_of_pages):
+ image_refs.append(existing_pdf.next_object_id(0))
+ if im.mode == "P" and "transparency" in im.info:
+ image_refs.append(existing_pdf.next_object_id(0))
+
+ page_refs.append(existing_pdf.next_object_id(0))
+ contents_refs.append(existing_pdf.next_object_id(0))
+ existing_pdf.pages.append(page_refs[-1])
+
+ #
+ # catalog and list of pages
+ existing_pdf.write_catalog()
+
+ page_number = 0
+ for im_sequence in ims:
+ im_pages = ImageSequence.Iterator(im_sequence) if save_all else [im_sequence]
+ for im in im_pages:
+ image_ref, procset = _write_image(im, filename, existing_pdf, image_refs)
+
+ #
+ # page
+
+ existing_pdf.write_page(
+ page_refs[page_number],
+ Resources=PdfParser.PdfDict(
+ ProcSet=[PdfParser.PdfName("PDF"), PdfParser.PdfName(procset)],
+ XObject=PdfParser.PdfDict(image=image_ref),
+ ),
+ MediaBox=[
+ 0,
+ 0,
+ im.width * 72.0 / x_resolution,
+ im.height * 72.0 / y_resolution,
+ ],
+ Contents=contents_refs[page_number],
+ )
+
+ #
+ # page contents
+
+ page_contents = b"q %f 0 0 %f 0 0 cm /image Do Q\n" % (
+ im.width * 72.0 / x_resolution,
+ im.height * 72.0 / y_resolution,
+ )
+
+ existing_pdf.write_obj(contents_refs[page_number], stream=page_contents)
+
+ page_number += 1
+
+ #
+ # trailer
+ existing_pdf.write_xref_and_trailer()
+ if hasattr(fp, "flush"):
+ fp.flush()
+ existing_pdf.close()
+
+
+#
+# --------------------------------------------------------------------
+
+
+Image.register_save("PDF", _save)
+Image.register_save_all("PDF", _save_all)
+
+Image.register_extension("PDF", ".pdf")
+
+Image.register_mime("PDF", "application/pdf")
diff --git a/Lib/site-packages/PIL/PdfParser.py b/Lib/site-packages/PIL/PdfParser.py
new file mode 100644
index 0000000..0144600
--- /dev/null
+++ b/Lib/site-packages/PIL/PdfParser.py
@@ -0,0 +1,998 @@
+from __future__ import annotations
+
+import calendar
+import codecs
+import collections
+import mmap
+import os
+import re
+import time
+import zlib
+
+
+# see 7.9.2.2 Text String Type on page 86 and D.3 PDFDocEncoding Character Set
+# on page 656
+def encode_text(s):
+ return codecs.BOM_UTF16_BE + s.encode("utf_16_be")
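+    # For example, encode_text("abc") == b"\xfe\xff\x00a\x00b\x00c"
+    # (UTF-16-BE with a byte order mark)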
+
+
+PDFDocEncoding = {
+ 0x16: "\u0017",
+ 0x18: "\u02D8",
+ 0x19: "\u02C7",
+ 0x1A: "\u02C6",
+ 0x1B: "\u02D9",
+ 0x1C: "\u02DD",
+ 0x1D: "\u02DB",
+ 0x1E: "\u02DA",
+ 0x1F: "\u02DC",
+ 0x80: "\u2022",
+ 0x81: "\u2020",
+ 0x82: "\u2021",
+ 0x83: "\u2026",
+ 0x84: "\u2014",
+ 0x85: "\u2013",
+ 0x86: "\u0192",
+ 0x87: "\u2044",
+ 0x88: "\u2039",
+ 0x89: "\u203A",
+ 0x8A: "\u2212",
+ 0x8B: "\u2030",
+ 0x8C: "\u201E",
+ 0x8D: "\u201C",
+ 0x8E: "\u201D",
+ 0x8F: "\u2018",
+ 0x90: "\u2019",
+ 0x91: "\u201A",
+ 0x92: "\u2122",
+ 0x93: "\uFB01",
+ 0x94: "\uFB02",
+ 0x95: "\u0141",
+ 0x96: "\u0152",
+ 0x97: "\u0160",
+ 0x98: "\u0178",
+ 0x99: "\u017D",
+ 0x9A: "\u0131",
+ 0x9B: "\u0142",
+ 0x9C: "\u0153",
+ 0x9D: "\u0161",
+ 0x9E: "\u017E",
+ 0xA0: "\u20AC",
+}
+
+
+def decode_text(b):
+ if b[: len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE:
+ return b[len(codecs.BOM_UTF16_BE) :].decode("utf_16_be")
+ else:
+ return "".join(PDFDocEncoding.get(byte, chr(byte)) for byte in b)
+
+
+class PdfFormatError(RuntimeError):
+ """An error that probably indicates a syntactic or semantic error in the
+ PDF file structure"""
+
+ pass
+
+
+def check_format_condition(condition, error_message):
+ if not condition:
+ raise PdfFormatError(error_message)
+
+
+class IndirectReference(
+ collections.namedtuple("IndirectReferenceTuple", ["object_id", "generation"])
+):
+ def __str__(self):
+ return f"{self.object_id} {self.generation} R"
+
+ def __bytes__(self):
+ return self.__str__().encode("us-ascii")
+
+ def __eq__(self, other):
+ return (
+ other.__class__ is self.__class__
+ and other.object_id == self.object_id
+ and other.generation == self.generation
+ )
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __hash__(self):
+ return hash((self.object_id, self.generation))
+
+
+class IndirectObjectDef(IndirectReference):
+ def __str__(self):
+ return f"{self.object_id} {self.generation} obj"
+
+
+class XrefTable:
+ def __init__(self):
+ self.existing_entries = {} # object ID => (offset, generation)
+ self.new_entries = {} # object ID => (offset, generation)
+ self.deleted_entries = {0: 65536} # object ID => generation
+ self.reading_finished = False
+
+ def __setitem__(self, key, value):
+ if self.reading_finished:
+ self.new_entries[key] = value
+ else:
+ self.existing_entries[key] = value
+ if key in self.deleted_entries:
+ del self.deleted_entries[key]
+
+ def __getitem__(self, key):
+ try:
+ return self.new_entries[key]
+ except KeyError:
+ return self.existing_entries[key]
+
+ def __delitem__(self, key):
+ if key in self.new_entries:
+ generation = self.new_entries[key][1] + 1
+ del self.new_entries[key]
+ self.deleted_entries[key] = generation
+ elif key in self.existing_entries:
+ generation = self.existing_entries[key][1] + 1
+ self.deleted_entries[key] = generation
+ elif key in self.deleted_entries:
+ generation = self.deleted_entries[key]
+ else:
+ msg = (
+ "object ID " + str(key) + " cannot be deleted because it doesn't exist"
+ )
+ raise IndexError(msg)
+
+ def __contains__(self, key):
+ return key in self.existing_entries or key in self.new_entries
+
+ def __len__(self):
+ return len(
+ set(self.existing_entries.keys())
+ | set(self.new_entries.keys())
+ | set(self.deleted_entries.keys())
+ )
+
+ def keys(self):
+ return (
+ set(self.existing_entries.keys()) - set(self.deleted_entries.keys())
+ ) | set(self.new_entries.keys())
+
+ def write(self, f):
+ keys = sorted(set(self.new_entries.keys()) | set(self.deleted_entries.keys()))
+ deleted_keys = sorted(set(self.deleted_entries.keys()))
+ startxref = f.tell()
+ f.write(b"xref\n")
+ while keys:
+ # find a contiguous sequence of object IDs
+ prev = None
+ for index, key in enumerate(keys):
+ if prev is None or prev + 1 == key:
+ prev = key
+ else:
+ contiguous_keys = keys[:index]
+ keys = keys[index:]
+ break
+ else:
+ contiguous_keys = keys
+ keys = None
+ f.write(b"%d %d\n" % (contiguous_keys[0], len(contiguous_keys)))
+ for object_id in contiguous_keys:
+ if object_id in self.new_entries:
+ f.write(b"%010d %05d n \n" % self.new_entries[object_id])
+ else:
+ this_deleted_object_id = deleted_keys.pop(0)
+ check_format_condition(
+ object_id == this_deleted_object_id,
+ f"expected the next deleted object ID to be {object_id}, "
+ f"instead found {this_deleted_object_id}",
+ )
+ try:
+ next_in_linked_list = deleted_keys[0]
+ except IndexError:
+ next_in_linked_list = 0
+ f.write(
+ b"%010d %05d f \n"
+ % (next_in_linked_list, self.deleted_entries[object_id])
+ )
+ return startxref
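+        # For example, with new entries for object IDs 3, 4 and 7 this writes
+        # three subsections -- "0 1" (the free-list head), "3 2" and "7 1" --
+        # since each subsection header must describe a contiguous run of IDs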
+
+
+class PdfName:
+ def __init__(self, name):
+ if isinstance(name, PdfName):
+ self.name = name.name
+ elif isinstance(name, bytes):
+ self.name = name
+ else:
+ self.name = name.encode("us-ascii")
+
+ def name_as_str(self):
+ return self.name.decode("us-ascii")
+
+ def __eq__(self, other):
+ return (
+ isinstance(other, PdfName) and other.name == self.name
+ ) or other == self.name
+
+ def __hash__(self):
+ return hash(self.name)
+
+ def __repr__(self):
+ return f"PdfName({repr(self.name)})"
+
+ @classmethod
+ def from_pdf_stream(cls, data):
+ return cls(PdfParser.interpret_name(data))
+
+ allowed_chars = set(range(33, 127)) - {ord(c) for c in "#%/()<>[]{}"}
+
+ def __bytes__(self):
+ result = bytearray(b"/")
+ for b in self.name:
+ if b in self.allowed_chars:
+ result.append(b)
+ else:
+ result.extend(b"#%02X" % b)
+ return bytes(result)
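+        # e.g. bytes(PdfName("With Space")) == b"/With#20Space", since 0x20
+        # falls outside allowed_chars and is hex-escaped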
+
+
+class PdfArray(list):
+ def __bytes__(self):
+ return b"[ " + b" ".join(pdf_repr(x) for x in self) + b" ]"
+
+
+class PdfDict(collections.UserDict):
+ def __setattr__(self, key, value):
+ if key == "data":
+ collections.UserDict.__setattr__(self, key, value)
+ else:
+ self[key.encode("us-ascii")] = value
+
+ def __getattr__(self, key):
+ try:
+ value = self[key.encode("us-ascii")]
+ except KeyError as e:
+ raise AttributeError(key) from e
+ if isinstance(value, bytes):
+ value = decode_text(value)
+ if key.endswith("Date"):
+ if value.startswith("D:"):
+ value = value[2:]
+
+ relationship = "Z"
+ if len(value) > 17:
+ relationship = value[14]
+ offset = int(value[15:17]) * 60
+ if len(value) > 20:
+ offset += int(value[18:20])
+
+ format = "%Y%m%d%H%M%S"[: len(value) - 2]
+ value = time.strptime(value[: len(format) + 2], format)
+ if relationship in ["+", "-"]:
+ offset *= 60
+ if relationship == "+":
+ offset *= -1
+ value = time.gmtime(calendar.timegm(value) + offset)
+ return value
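+        # e.g. a stored b"D:20240102030405+05'30'" date comes back as the
+        # struct_time for 2024-01-01 21:34:05 UTC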
+
+ def __bytes__(self):
+ out = bytearray(b"<<")
+ for key, value in self.items():
+ if value is None:
+ continue
+ value = pdf_repr(value)
+ out.extend(b"\n")
+ out.extend(bytes(PdfName(key)))
+ out.extend(b" ")
+ out.extend(value)
+ out.extend(b"\n>>")
+ return bytes(out)
+
+
+class PdfBinary:
+ def __init__(self, data):
+ self.data = data
+
+ def __bytes__(self):
+ return b"<%s>" % b"".join(b"%02X" % b for b in self.data)
+
+
+class PdfStream:
+ def __init__(self, dictionary, buf):
+ self.dictionary = dictionary
+ self.buf = buf
+
+ def decode(self):
+ try:
+ filter = self.dictionary.Filter
+ except AttributeError:
+ return self.buf
+ if filter == b"FlateDecode":
+ try:
+ expected_length = self.dictionary.DL
+ except AttributeError:
+ expected_length = self.dictionary.Length
+ return zlib.decompress(self.buf, bufsize=int(expected_length))
+ else:
+ msg = f"stream filter {repr(self.dictionary.Filter)} unknown/unsupported"
+ raise NotImplementedError(msg)
+
+
+def pdf_repr(x):
+ if x is True:
+ return b"true"
+ elif x is False:
+ return b"false"
+ elif x is None:
+ return b"null"
+ elif isinstance(x, (PdfName, PdfDict, PdfArray, PdfBinary)):
+ return bytes(x)
+ elif isinstance(x, (int, float)):
+ return str(x).encode("us-ascii")
+ elif isinstance(x, time.struct_time):
+ return b"(D:" + time.strftime("%Y%m%d%H%M%SZ", x).encode("us-ascii") + b")"
+ elif isinstance(x, dict):
+ return bytes(PdfDict(x))
+ elif isinstance(x, list):
+ return bytes(PdfArray(x))
+ elif isinstance(x, str):
+ return pdf_repr(encode_text(x))
+ elif isinstance(x, bytes):
+ # XXX escape more chars? handle binary garbage
+ x = x.replace(b"\\", b"\\\\")
+ x = x.replace(b"(", b"\\(")
+ x = x.replace(b")", b"\\)")
+ return b"(" + x + b")"
+ else:
+ return bytes(x)
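+    # For example, pdf_repr(True) == b"true", pdf_repr(0.5) == b"0.5", and
+    # pdf_repr(b"a(b") == b"(a\\(b)" with the parenthesis escaped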
+
+
+class PdfParser:
+ """Based on
+ https://www.adobe.com/content/dam/acom/en/devnet/acrobat/pdfs/PDF32000_2008.pdf
+ Supports PDF up to 1.4
+ """
+
+ def __init__(self, filename=None, f=None, buf=None, start_offset=0, mode="rb"):
+ if buf and f:
+ msg = "specify buf or f or filename, but not both buf and f"
+ raise RuntimeError(msg)
+ self.filename = filename
+ self.buf = buf
+ self.f = f
+ self.start_offset = start_offset
+ self.should_close_buf = False
+ self.should_close_file = False
+ if filename is not None and f is None:
+ self.f = f = open(filename, mode)
+ self.should_close_file = True
+ if f is not None:
+ self.buf = buf = self.get_buf_from_file(f)
+ self.should_close_buf = True
+ if not filename and hasattr(f, "name"):
+ self.filename = f.name
+ self.cached_objects = {}
+ if buf:
+ self.read_pdf_info()
+ else:
+ self.file_size_total = self.file_size_this = 0
+ self.root = PdfDict()
+ self.root_ref = None
+ self.info = PdfDict()
+ self.info_ref = None
+ self.page_tree_root = {}
+ self.pages = []
+ self.orig_pages = []
+ self.pages_ref = None
+ self.last_xref_section_offset = None
+ self.trailer_dict = {}
+ self.xref_table = XrefTable()
+ self.xref_table.reading_finished = True
+ if f:
+ self.seek_end()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+ return False # do not suppress exceptions
+
+ def start_writing(self):
+ self.close_buf()
+ self.seek_end()
+
+ def close_buf(self):
+ try:
+ self.buf.close()
+ except AttributeError:
+ pass
+ self.buf = None
+
+ def close(self):
+ if self.should_close_buf:
+ self.close_buf()
+ if self.f is not None and self.should_close_file:
+ self.f.close()
+ self.f = None
+
+ def seek_end(self):
+ self.f.seek(0, os.SEEK_END)
+
+ def write_header(self):
+ self.f.write(b"%PDF-1.4\n")
+
+ def write_comment(self, s):
+ self.f.write(f"% {s}\n".encode())
+
+ def write_catalog(self):
+ self.del_root()
+ self.root_ref = self.next_object_id(self.f.tell())
+ self.pages_ref = self.next_object_id(0)
+ self.rewrite_pages()
+ self.write_obj(self.root_ref, Type=PdfName(b"Catalog"), Pages=self.pages_ref)
+ self.write_obj(
+ self.pages_ref,
+ Type=PdfName(b"Pages"),
+ Count=len(self.pages),
+ Kids=self.pages,
+ )
+ return self.root_ref
+
+ def rewrite_pages(self):
+ pages_tree_nodes_to_delete = []
+ for i, page_ref in enumerate(self.orig_pages):
+ page_info = self.cached_objects[page_ref]
+ del self.xref_table[page_ref.object_id]
+ pages_tree_nodes_to_delete.append(page_info[PdfName(b"Parent")])
+ if page_ref not in self.pages:
+ # the page has been deleted
+ continue
+ # make dict keys into strings for passing to write_page
+ stringified_page_info = {}
+ for key, value in page_info.items():
+ # key should be a PdfName
+ stringified_page_info[key.name_as_str()] = value
+ stringified_page_info["Parent"] = self.pages_ref
+ new_page_ref = self.write_page(None, **stringified_page_info)
+ for j, cur_page_ref in enumerate(self.pages):
+ if cur_page_ref == page_ref:
+ # replace the page reference with the new one
+ self.pages[j] = new_page_ref
+ # delete redundant Pages tree nodes from xref table
+ for pages_tree_node_ref in pages_tree_nodes_to_delete:
+ while pages_tree_node_ref:
+ pages_tree_node = self.cached_objects[pages_tree_node_ref]
+ if pages_tree_node_ref.object_id in self.xref_table:
+ del self.xref_table[pages_tree_node_ref.object_id]
+ pages_tree_node_ref = pages_tree_node.get(b"Parent", None)
+ self.orig_pages = []
+
+ def write_xref_and_trailer(self, new_root_ref=None):
+ if new_root_ref:
+ self.del_root()
+ self.root_ref = new_root_ref
+ if self.info:
+ self.info_ref = self.write_obj(None, self.info)
+ start_xref = self.xref_table.write(self.f)
+ num_entries = len(self.xref_table)
+ trailer_dict = {b"Root": self.root_ref, b"Size": num_entries}
+ if self.last_xref_section_offset is not None:
+ trailer_dict[b"Prev"] = self.last_xref_section_offset
+ if self.info:
+ trailer_dict[b"Info"] = self.info_ref
+ self.last_xref_section_offset = start_xref
+ self.f.write(
+ b"trailer\n"
+ + bytes(PdfDict(trailer_dict))
+ + b"\nstartxref\n%d\n%%%%EOF" % start_xref
+ )
+
+ def write_page(self, ref, *objs, **dict_obj):
+ if isinstance(ref, int):
+ ref = self.pages[ref]
+ if "Type" not in dict_obj:
+ dict_obj["Type"] = PdfName(b"Page")
+ if "Parent" not in dict_obj:
+ dict_obj["Parent"] = self.pages_ref
+ return self.write_obj(ref, *objs, **dict_obj)
+
+ def write_obj(self, ref, *objs, **dict_obj):
+ f = self.f
+ if ref is None:
+ ref = self.next_object_id(f.tell())
+ else:
+ self.xref_table[ref.object_id] = (f.tell(), ref.generation)
+ f.write(bytes(IndirectObjectDef(*ref)))
+ stream = dict_obj.pop("stream", None)
+ if stream is not None:
+ dict_obj["Length"] = len(stream)
+ if dict_obj:
+ f.write(pdf_repr(dict_obj))
+ for obj in objs:
+ f.write(pdf_repr(obj))
+ if stream is not None:
+ f.write(b"stream\n")
+ f.write(stream)
+ f.write(b"\nendstream\n")
+ f.write(b"endobj\n")
+ return ref
+
+ def del_root(self):
+ if self.root_ref is None:
+ return
+ del self.xref_table[self.root_ref.object_id]
+ del self.xref_table[self.root[b"Pages"].object_id]
+
+ @staticmethod
+ def get_buf_from_file(f):
+ if hasattr(f, "getbuffer"):
+ return f.getbuffer()
+ elif hasattr(f, "getvalue"):
+ return f.getvalue()
+ else:
+ try:
+ return mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
+ except ValueError: # cannot mmap an empty file
+ return b""
+
+ def read_pdf_info(self):
+ self.file_size_total = len(self.buf)
+ self.file_size_this = self.file_size_total - self.start_offset
+ self.read_trailer()
+ self.root_ref = self.trailer_dict[b"Root"]
+ self.info_ref = self.trailer_dict.get(b"Info", None)
+ self.root = PdfDict(self.read_indirect(self.root_ref))
+ if self.info_ref is None:
+ self.info = PdfDict()
+ else:
+ self.info = PdfDict(self.read_indirect(self.info_ref))
+ check_format_condition(b"Type" in self.root, "/Type missing in Root")
+ check_format_condition(
+ self.root[b"Type"] == b"Catalog", "/Type in Root is not /Catalog"
+ )
+ check_format_condition(b"Pages" in self.root, "/Pages missing in Root")
+ check_format_condition(
+ isinstance(self.root[b"Pages"], IndirectReference),
+ "/Pages in Root is not an indirect reference",
+ )
+ self.pages_ref = self.root[b"Pages"]
+ self.page_tree_root = self.read_indirect(self.pages_ref)
+ self.pages = self.linearize_page_tree(self.page_tree_root)
+ # save the original list of page references
+ # in case the user modifies, adds or deletes some pages
+ # and we need to rewrite the pages and their list
+ self.orig_pages = self.pages[:]
+
+ def next_object_id(self, offset=None):
+ try:
+ # TODO: support reuse of deleted objects
+ reference = IndirectReference(max(self.xref_table.keys()) + 1, 0)
+ except ValueError:
+ reference = IndirectReference(1, 0)
+ if offset is not None:
+ self.xref_table[reference.object_id] = (offset, 0)
+ return reference
+
+ delimiter = rb"[][()<>{}/%]"
+ delimiter_or_ws = rb"[][()<>{}/%\000\011\012\014\015\040]"
+ whitespace = rb"[\000\011\012\014\015\040]"
+ whitespace_or_hex = rb"[\000\011\012\014\015\0400-9a-fA-F]"
+ whitespace_optional = whitespace + b"*"
+ whitespace_mandatory = whitespace + b"+"
+ # No "\012" aka "\n" or "\015" aka "\r":
+ whitespace_optional_no_nl = rb"[\000\011\014\040]*"
+ newline_only = rb"[\r\n]+"
+ newline = whitespace_optional_no_nl + newline_only + whitespace_optional_no_nl
+ re_trailer_end = re.compile(
+ whitespace_mandatory
+ + rb"trailer"
+ + whitespace_optional
+ + rb"<<(.*>>)"
+ + newline
+ + rb"startxref"
+ + newline
+ + rb"([0-9]+)"
+ + newline
+ + rb"%%EOF"
+ + whitespace_optional
+ + rb"$",
+ re.DOTALL,
+ )
+ re_trailer_prev = re.compile(
+ whitespace_optional
+ + rb"trailer"
+ + whitespace_optional
+ + rb"<<(.*?>>)"
+ + newline
+ + rb"startxref"
+ + newline
+ + rb"([0-9]+)"
+ + newline
+ + rb"%%EOF"
+ + whitespace_optional,
+ re.DOTALL,
+ )
+
+ def read_trailer(self):
+ search_start_offset = len(self.buf) - 16384
+ if search_start_offset < self.start_offset:
+ search_start_offset = self.start_offset
+ m = self.re_trailer_end.search(self.buf, search_start_offset)
+ check_format_condition(m, "trailer end not found")
+ # make sure we found the LAST trailer
+ last_match = m
+ while m:
+ last_match = m
+ m = self.re_trailer_end.search(self.buf, m.start() + 16)
+ if not m:
+ m = last_match
+ trailer_data = m.group(1)
+ self.last_xref_section_offset = int(m.group(2))
+ self.trailer_dict = self.interpret_trailer(trailer_data)
+ self.xref_table = XrefTable()
+ self.read_xref_table(xref_section_offset=self.last_xref_section_offset)
+ if b"Prev" in self.trailer_dict:
+ self.read_prev_trailer(self.trailer_dict[b"Prev"])
+
+ def read_prev_trailer(self, xref_section_offset):
+ trailer_offset = self.read_xref_table(xref_section_offset=xref_section_offset)
+ m = self.re_trailer_prev.search(
+ self.buf[trailer_offset : trailer_offset + 16384]
+ )
+ check_format_condition(m, "previous trailer not found")
+ trailer_data = m.group(1)
+ check_format_condition(
+ int(m.group(2)) == xref_section_offset,
+ "xref section offset in previous trailer doesn't match what was expected",
+ )
+ trailer_dict = self.interpret_trailer(trailer_data)
+ if b"Prev" in trailer_dict:
+ self.read_prev_trailer(trailer_dict[b"Prev"])
+
+ re_whitespace_optional = re.compile(whitespace_optional)
+ re_name = re.compile(
+ whitespace_optional
+ + rb"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?="
+ + delimiter_or_ws
+ + rb")"
+ )
+ re_dict_start = re.compile(whitespace_optional + rb"<<")
+ re_dict_end = re.compile(whitespace_optional + rb">>" + whitespace_optional)
+
+ @classmethod
+ def interpret_trailer(cls, trailer_data):
+ trailer = {}
+ offset = 0
+ while True:
+ m = cls.re_name.match(trailer_data, offset)
+ if not m:
+ m = cls.re_dict_end.match(trailer_data, offset)
+ check_format_condition(
+ m and m.end() == len(trailer_data),
+ "name not found in trailer, remaining data: "
+ + repr(trailer_data[offset:]),
+ )
+ break
+ key = cls.interpret_name(m.group(1))
+ value, offset = cls.get_value(trailer_data, m.end())
+ trailer[key] = value
+ check_format_condition(
+ b"Size" in trailer and isinstance(trailer[b"Size"], int),
+ "/Size not in trailer or not an integer",
+ )
+ check_format_condition(
+ b"Root" in trailer and isinstance(trailer[b"Root"], IndirectReference),
+ "/Root not in trailer or not an indirect reference",
+ )
+ return trailer
+
+ re_hashes_in_name = re.compile(rb"([^#]*)(#([0-9a-fA-F]{2}))?")
+
+ @classmethod
+ def interpret_name(cls, raw, as_text=False):
+ name = b""
+ for m in cls.re_hashes_in_name.finditer(raw):
+ if m.group(3):
+ name += m.group(1) + bytearray.fromhex(m.group(3).decode("us-ascii"))
+ else:
+ name += m.group(1)
+ if as_text:
+ return name.decode("utf-8")
+ else:
+ return bytes(name)
+
+ re_null = re.compile(whitespace_optional + rb"null(?=" + delimiter_or_ws + rb")")
+ re_true = re.compile(whitespace_optional + rb"true(?=" + delimiter_or_ws + rb")")
+ re_false = re.compile(whitespace_optional + rb"false(?=" + delimiter_or_ws + rb")")
+ re_int = re.compile(
+ whitespace_optional + rb"([-+]?[0-9]+)(?=" + delimiter_or_ws + rb")"
+ )
+ re_real = re.compile(
+ whitespace_optional
+ + rb"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?="
+ + delimiter_or_ws
+ + rb")"
+ )
+ re_array_start = re.compile(whitespace_optional + rb"\[")
+ re_array_end = re.compile(whitespace_optional + rb"]")
+ re_string_hex = re.compile(
+ whitespace_optional + rb"<(" + whitespace_or_hex + rb"*)>"
+ )
+ re_string_lit = re.compile(whitespace_optional + rb"\(")
+ re_indirect_reference = re.compile(
+ whitespace_optional
+ + rb"([-+]?[0-9]+)"
+ + whitespace_mandatory
+ + rb"([-+]?[0-9]+)"
+ + whitespace_mandatory
+ + rb"R(?="
+ + delimiter_or_ws
+ + rb")"
+ )
+ re_indirect_def_start = re.compile(
+ whitespace_optional
+ + rb"([-+]?[0-9]+)"
+ + whitespace_mandatory
+ + rb"([-+]?[0-9]+)"
+ + whitespace_mandatory
+ + rb"obj(?="
+ + delimiter_or_ws
+ + rb")"
+ )
+ re_indirect_def_end = re.compile(
+ whitespace_optional + rb"endobj(?=" + delimiter_or_ws + rb")"
+ )
+ re_comment = re.compile(
+ rb"(" + whitespace_optional + rb"%[^\r\n]*" + newline + rb")*"
+ )
+ re_stream_start = re.compile(whitespace_optional + rb"stream\r?\n")
+ re_stream_end = re.compile(
+ whitespace_optional + rb"endstream(?=" + delimiter_or_ws + rb")"
+ )
+
+ @classmethod
+ def get_value(cls, data, offset, expect_indirect=None, max_nesting=-1):
+ if max_nesting == 0:
+ return None, None
+ m = cls.re_comment.match(data, offset)
+ if m:
+ offset = m.end()
+ m = cls.re_indirect_def_start.match(data, offset)
+ if m:
+ check_format_condition(
+ int(m.group(1)) > 0,
+ "indirect object definition: object ID must be greater than 0",
+ )
+ check_format_condition(
+ int(m.group(2)) >= 0,
+ "indirect object definition: generation must be non-negative",
+ )
+ check_format_condition(
+ expect_indirect is None
+ or expect_indirect
+ == IndirectReference(int(m.group(1)), int(m.group(2))),
+ "indirect object definition different than expected",
+ )
+ object, offset = cls.get_value(data, m.end(), max_nesting=max_nesting - 1)
+ if offset is None:
+ return object, None
+ m = cls.re_indirect_def_end.match(data, offset)
+ check_format_condition(m, "indirect object definition end not found")
+ return object, m.end()
+ check_format_condition(
+ not expect_indirect, "indirect object definition not found"
+ )
+ m = cls.re_indirect_reference.match(data, offset)
+ if m:
+ check_format_condition(
+ int(m.group(1)) > 0,
+ "indirect object reference: object ID must be greater than 0",
+ )
+ check_format_condition(
+ int(m.group(2)) >= 0,
+ "indirect object reference: generation must be non-negative",
+ )
+ return IndirectReference(int(m.group(1)), int(m.group(2))), m.end()
+ m = cls.re_dict_start.match(data, offset)
+ if m:
+ offset = m.end()
+ result = {}
+ m = cls.re_dict_end.match(data, offset)
+ while not m:
+ key, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1)
+ if offset is None:
+ return result, None
+ value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1)
+ result[key] = value
+ if offset is None:
+ return result, None
+ m = cls.re_dict_end.match(data, offset)
+ offset = m.end()
+ m = cls.re_stream_start.match(data, offset)
+ if m:
+ try:
+ stream_len = int(result[b"Length"])
+ except (TypeError, KeyError, ValueError) as e:
+ msg = "bad or missing Length in stream dict (%r)" % result.get(
+ b"Length", None
+ )
+ raise PdfFormatError(msg) from e
+ stream_data = data[m.end() : m.end() + stream_len]
+ m = cls.re_stream_end.match(data, m.end() + stream_len)
+ check_format_condition(m, "stream end not found")
+ offset = m.end()
+ result = PdfStream(PdfDict(result), stream_data)
+ else:
+ result = PdfDict(result)
+ return result, offset
+ m = cls.re_array_start.match(data, offset)
+ if m:
+ offset = m.end()
+ result = []
+ m = cls.re_array_end.match(data, offset)
+ while not m:
+ value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1)
+ result.append(value)
+ if offset is None:
+ return result, None
+ m = cls.re_array_end.match(data, offset)
+ return result, m.end()
+ m = cls.re_null.match(data, offset)
+ if m:
+ return None, m.end()
+ m = cls.re_true.match(data, offset)
+ if m:
+ return True, m.end()
+ m = cls.re_false.match(data, offset)
+ if m:
+ return False, m.end()
+ m = cls.re_name.match(data, offset)
+ if m:
+ return PdfName(cls.interpret_name(m.group(1))), m.end()
+ m = cls.re_int.match(data, offset)
+ if m:
+ return int(m.group(1)), m.end()
+ m = cls.re_real.match(data, offset)
+ if m:
+ # XXX Decimal instead of float???
+ return float(m.group(1)), m.end()
+ m = cls.re_string_hex.match(data, offset)
+ if m:
+ # filter out whitespace
+ hex_string = bytearray(
+ b for b in m.group(1) if b in b"0123456789abcdefABCDEF"
+ )
+ if len(hex_string) % 2 == 1:
+ # append a 0 if the length is not even - yes, at the end
+ hex_string.append(ord(b"0"))
+ return bytearray.fromhex(hex_string.decode("us-ascii")), m.end()
+ m = cls.re_string_lit.match(data, offset)
+ if m:
+ return cls.get_literal_string(data, m.end())
+ # return None, offset # fallback (only for debugging)
+ msg = "unrecognized object: " + repr(data[offset : offset + 32])
+ raise PdfFormatError(msg)
+
+ re_lit_str_token = re.compile(
+ rb"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))"
+ )
+ escaped_chars = {
+ b"n": b"\n",
+ b"r": b"\r",
+ b"t": b"\t",
+ b"b": b"\b",
+ b"f": b"\f",
+ b"(": b"(",
+ b")": b")",
+ b"\\": b"\\",
+ ord(b"n"): b"\n",
+ ord(b"r"): b"\r",
+ ord(b"t"): b"\t",
+ ord(b"b"): b"\b",
+ ord(b"f"): b"\f",
+ ord(b"("): b"(",
+ ord(b")"): b")",
+ ord(b"\\"): b"\\",
+ }
+
+ @classmethod
+ def get_literal_string(cls, data, offset):
+ nesting_depth = 0
+ result = bytearray()
+ for m in cls.re_lit_str_token.finditer(data, offset):
+ result.extend(data[offset : m.start()])
+ if m.group(1):
+ result.extend(cls.escaped_chars[m.group(1)[1]])
+ elif m.group(2):
+ result.append(int(m.group(2)[1:], 8))
+ elif m.group(3):
+ pass
+ elif m.group(5):
+ result.extend(b"\n")
+ elif m.group(6):
+ result.extend(b"(")
+ nesting_depth += 1
+ elif m.group(7):
+ if nesting_depth == 0:
+ return bytes(result), m.end()
+ result.extend(b")")
+ nesting_depth -= 1
+ offset = m.end()
+ msg = "unfinished literal string"
+ raise PdfFormatError(msg)
+
+ re_xref_section_start = re.compile(whitespace_optional + rb"xref" + newline)
+ re_xref_subsection_start = re.compile(
+ whitespace_optional
+ + rb"([0-9]+)"
+ + whitespace_mandatory
+ + rb"([0-9]+)"
+ + whitespace_optional
+ + newline_only
+ )
+ re_xref_entry = re.compile(rb"([0-9]{10}) ([0-9]{5}) ([fn])( \r| \n|\r\n)")
+
+ def read_xref_table(self, xref_section_offset):
+ subsection_found = False
+ m = self.re_xref_section_start.match(
+ self.buf, xref_section_offset + self.start_offset
+ )
+ check_format_condition(m, "xref section start not found")
+ offset = m.end()
+ while True:
+ m = self.re_xref_subsection_start.match(self.buf, offset)
+ if not m:
+ check_format_condition(
+ subsection_found, "xref subsection start not found"
+ )
+ break
+ subsection_found = True
+ offset = m.end()
+ first_object = int(m.group(1))
+ num_objects = int(m.group(2))
+ for i in range(first_object, first_object + num_objects):
+ m = self.re_xref_entry.match(self.buf, offset)
+ check_format_condition(m, "xref entry not found")
+ offset = m.end()
+ is_free = m.group(3) == b"f"
+ if not is_free:
+ generation = int(m.group(2))
+ new_entry = (int(m.group(1)), generation)
+ if i not in self.xref_table:
+ self.xref_table[i] = new_entry
+ return offset
+
+ def read_indirect(self, ref, max_nesting=-1):
+ offset, generation = self.xref_table[ref[0]]
+ check_format_condition(
+ generation == ref[1],
+ f"expected to find generation {ref[1]} for object ID {ref[0]} in xref "
+ f"table, instead found generation {generation} at offset {offset}",
+ )
+ value = self.get_value(
+ self.buf,
+ offset + self.start_offset,
+ expect_indirect=IndirectReference(*ref),
+ max_nesting=max_nesting,
+ )[0]
+ self.cached_objects[ref] = value
+ return value
+
+ def linearize_page_tree(self, node=None):
+ if node is None:
+ node = self.page_tree_root
+ check_format_condition(
+ node[b"Type"] == b"Pages", "/Type of page tree node is not /Pages"
+ )
+ pages = []
+ for kid in node[b"Kids"]:
+ kid_object = self.read_indirect(kid)
+ if kid_object[b"Type"] == b"Page":
+ pages.append(kid)
+ else:
+ pages.extend(self.linearize_page_tree(node=kid_object))
+ return pages
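+
+
+# Usage sketch (illustrative; "doc.pdf" is a hypothetical existing PDF):
+#
+#     with PdfParser("doc.pdf") as pdf:
+#         print(len(pdf.pages), "page(s)")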
diff --git a/Lib/site-packages/PIL/PixarImagePlugin.py b/Lib/site-packages/PIL/PixarImagePlugin.py
new file mode 100644
index 0000000..af866fe
--- /dev/null
+++ b/Lib/site-packages/PIL/PixarImagePlugin.py
@@ -0,0 +1,70 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PIXAR raster support for PIL
+#
+# history:
+# 97-01-29 fl Created
+#
+# notes:
+# This is incomplete; it is based on a few samples created with
+# Photoshop 2.5 and 3.0, and a summary description provided by
+# Greg Coats. Hopefully, "L" and
+# "RGBA" support will be added in future versions.
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from . import Image, ImageFile
+from ._binary import i16le as i16
+
+#
+# helpers
+
+
+def _accept(prefix):
+ return prefix[:4] == b"\200\350\000\000"
+
+
+##
+# Image plugin for PIXAR raster images.
+
+
+class PixarImageFile(ImageFile.ImageFile):
+ format = "PIXAR"
+ format_description = "PIXAR raster image"
+
+ def _open(self):
+ # assuming a 4-byte magic label
+ s = self.fp.read(4)
+ if not _accept(s):
+ msg = "not a PIXAR file"
+ raise SyntaxError(msg)
+
+ # read rest of header
+ s = s + self.fp.read(508)
+
+ self._size = i16(s, 418), i16(s, 416)
+
+ # get channel/depth descriptions
+ mode = i16(s, 424), i16(s, 426)
+
+ if mode == (14, 2):
+ self._mode = "RGB"
+ # FIXME: to be continued...
+
+ # create tile descriptor (assuming "dumped")
+ self.tile = [("raw", (0, 0) + self.size, 1024, (self.mode, 0, 1))]
+
+
+#
+# --------------------------------------------------------------------
+
+Image.register_open(PixarImageFile.format, PixarImageFile, _accept)
+
+Image.register_extension(PixarImageFile.format, ".pxr")
diff --git a/Lib/site-packages/PIL/PngImagePlugin.py b/Lib/site-packages/PIL/PngImagePlugin.py
new file mode 100644
index 0000000..e4ed938
--- /dev/null
+++ b/Lib/site-packages/PIL/PngImagePlugin.py
@@ -0,0 +1,1460 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PNG support code
+#
+# See "PNG (Portable Network Graphics) Specification, version 1.0;
+# W3C Recommendation", 1996-10-01, Thomas Boutell (ed.).
+#
+# history:
+# 1996-05-06 fl Created (couldn't resist it)
+# 1996-12-14 fl Upgraded, added read and verify support (0.2)
+# 1996-12-15 fl Separate PNG stream parser
+# 1996-12-29 fl Added write support, added getchunks
+# 1996-12-30 fl Eliminated circular references in decoder (0.3)
+# 1998-07-12 fl Read/write 16-bit images as mode I (0.4)
+# 2001-02-08 fl Added transparency support (from Zircon) (0.5)
+# 2001-04-16 fl Don't close data source in "open" method (0.6)
+# 2004-02-24 fl Don't even pretend to support interlaced files (0.7)
+# 2004-08-31 fl Do basic sanity check on chunk identifiers (0.8)
+# 2004-09-20 fl Added PngInfo chunk container
+# 2004-12-18 fl Added DPI read support (based on code by Niki Spahiev)
+# 2008-08-13 fl Added tRNS support for RGB images
+# 2009-03-06 fl Support for preserving ICC profiles (by Florian Hoech)
+# 2009-03-08 fl Added zTXT support (from Lowell Alleman)
+# 2009-03-29 fl Read interlaced PNG files (from Conrado Porto Lopes Gouvua)
+#
+# Copyright (c) 1997-2009 by Secret Labs AB
+# Copyright (c) 1996 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import itertools
+import logging
+import re
+import struct
+import warnings
+import zlib
+from enum import IntEnum
+
+from . import Image, ImageChops, ImageFile, ImagePalette, ImageSequence
+from ._binary import i16be as i16
+from ._binary import i32be as i32
+from ._binary import o8
+from ._binary import o16be as o16
+from ._binary import o32be as o32
+
+logger = logging.getLogger(__name__)
+
+is_cid = re.compile(rb"\w\w\w\w").match
+
+
+_MAGIC = b"\211PNG\r\n\032\n"
+
+
+_MODES = {
+ # supported bits/color combinations, and corresponding modes/rawmodes
+ # Grayscale
+ (1, 0): ("1", "1"),
+ (2, 0): ("L", "L;2"),
+ (4, 0): ("L", "L;4"),
+ (8, 0): ("L", "L"),
+ (16, 0): ("I", "I;16B"),
+ # Truecolour
+ (8, 2): ("RGB", "RGB"),
+ (16, 2): ("RGB", "RGB;16B"),
+ # Indexed-colour
+ (1, 3): ("P", "P;1"),
+ (2, 3): ("P", "P;2"),
+ (4, 3): ("P", "P;4"),
+ (8, 3): ("P", "P"),
+ # Grayscale with alpha
+ (8, 4): ("LA", "LA"),
+ (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available
+ # Truecolour with alpha
+ (8, 6): ("RGBA", "RGBA"),
+ (16, 6): ("RGBA", "RGBA;16B"),
+}
+
+
+_simple_palette = re.compile(b"^\xff*\x00\xff*$")
+
+MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK
+"""
+Maximum decompressed size for an iTXt or zTXt chunk.
+Eliminates decompression bombs where compressed chunks can expand 1000x.
+See :ref:`Text in PNG File Format`.
+"""
+MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK
+"""
+Set the maximum total text chunk size.
+See :ref:`Text in PNG File Format`.
+"""
+
+
+# APNG frame disposal modes
+class Disposal(IntEnum):
+ OP_NONE = 0
+ """
+ No disposal is done on this frame before rendering the next frame.
+ See :ref:`Saving APNG sequences`.
+ """
+ OP_BACKGROUND = 1
+ """
+ This frame’s modified region is cleared to fully transparent black before rendering
+ the next frame.
+ See :ref:`Saving APNG sequences`.
+ """
+ OP_PREVIOUS = 2
+ """
+ This frame’s modified region is reverted to the previous frame’s contents before
+ rendering the next frame.
+ See :ref:`Saving APNG sequences`.
+ """
+
+
+# APNG frame blend modes
+class Blend(IntEnum):
+ OP_SOURCE = 0
+ """
+ All color components of this frame, including alpha, overwrite the previous output
+ image contents.
+ See :ref:`Saving APNG sequences`.
+ """
+ OP_OVER = 1
+ """
+ This frame should be alpha composited with the previous output image contents.
+ See :ref:`Saving APNG sequences`.
+ """
+
+
+def _safe_zlib_decompress(s):
+ dobj = zlib.decompressobj()
+ plaintext = dobj.decompress(s, MAX_TEXT_CHUNK)
+ if dobj.unconsumed_tail:
+ msg = "Decompressed Data Too Large"
+ raise ValueError(msg)
+ return plaintext
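+    # e.g. feeding zlib.compress(b"x" * (MAX_TEXT_CHUNK + 1)) here raises
+    # ValueError instead of inflating past the cap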
+
+
+def _crc32(data, seed=0):
+ return zlib.crc32(data, seed) & 0xFFFFFFFF
+
+
+# --------------------------------------------------------------------
+# Support classes. Suitable for PNG and related formats like MNG etc.
+
+
+class ChunkStream:
+ def __init__(self, fp):
+ self.fp = fp
+ self.queue = []
+
+ def read(self):
+ """Fetch a new chunk. Returns header information."""
+ cid = None
+
+ if self.queue:
+ cid, pos, length = self.queue.pop()
+ self.fp.seek(pos)
+ else:
+ s = self.fp.read(8)
+ cid = s[4:]
+ pos = self.fp.tell()
+ length = i32(s)
+
+ if not is_cid(cid):
+ if not ImageFile.LOAD_TRUNCATED_IMAGES:
+ msg = f"broken PNG file (chunk {repr(cid)})"
+ raise SyntaxError(msg)
+
+ return cid, pos, length
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+ def close(self):
+ self.queue = self.fp = None
+
+ def push(self, cid, pos, length):
+ self.queue.append((cid, pos, length))
+
+ def call(self, cid, pos, length):
+ """Call the appropriate chunk handler"""
+
+ logger.debug("STREAM %r %s %s", cid, pos, length)
+ return getattr(self, "chunk_" + cid.decode("ascii"))(pos, length)
+
+ def crc(self, cid, data):
+ """Read and verify checksum"""
+
+ # Skip CRC checks for ancillary chunks if allowed to load truncated
+ # images
+        # bit 5 of the first byte of the chunk type is 1 [specs, section 5.4]
+ if ImageFile.LOAD_TRUNCATED_IMAGES and (cid[0] >> 5 & 1):
+ self.crc_skip(cid, data)
+ return
+
+ try:
+ crc1 = _crc32(data, _crc32(cid))
+ crc2 = i32(self.fp.read(4))
+ if crc1 != crc2:
+ msg = f"broken PNG file (bad header checksum in {repr(cid)})"
+ raise SyntaxError(msg)
+ except struct.error as e:
+ msg = f"broken PNG file (incomplete checksum in {repr(cid)})"
+ raise SyntaxError(msg) from e
+
+ def crc_skip(self, cid, data):
+ """Read checksum"""
+
+ self.fp.read(4)
+
+ def verify(self, endchunk=b"IEND"):
+ # Simple approach; just calculate checksum for all remaining
+ # blocks. Must be called directly after open.
+
+ cids = []
+
+ while True:
+ try:
+ cid, pos, length = self.read()
+ except struct.error as e:
+ msg = "truncated PNG file"
+ raise OSError(msg) from e
+
+ if cid == endchunk:
+ break
+ self.crc(cid, ImageFile._safe_read(self.fp, length))
+ cids.append(cid)
+
+ return cids
+
+
+class iTXt(str):
+ """
+ Subclass of string to allow iTXt chunks to look like strings while
+ keeping their extra information
+
+ """
+
+ @staticmethod
+ def __new__(cls, text, lang=None, tkey=None):
+ """
+ :param cls: the class to use when creating the instance
+ :param text: value for this key
+ :param lang: language code
+ :param tkey: UTF-8 version of the key name
+ """
+
+ self = str.__new__(cls, text)
+ self.lang = lang
+ self.tkey = tkey
+ return self
+
+
+class PngInfo:
+ """
+ PNG chunk container (for use with save(pnginfo=))
+
+ """
+
+ def __init__(self):
+ self.chunks = []
+
+ def add(self, cid, data, after_idat=False):
+ """Appends an arbitrary chunk. Use with caution.
+
+ :param cid: a byte string, 4 bytes long.
+ :param data: a byte string of the encoded data
+ :param after_idat: for use with private chunks. Whether the chunk
+ should be written after IDAT
+
+ """
+
+ chunk = [cid, data]
+ if after_idat:
+ chunk.append(True)
+ self.chunks.append(tuple(chunk))
+
+ def add_itxt(self, key, value, lang="", tkey="", zip=False):
+ """Appends an iTXt chunk.
+
+ :param key: latin-1 encodable text key name
+ :param value: value for this key
+ :param lang: language code
+ :param tkey: UTF-8 version of the key name
+ :param zip: compression flag
+
+ """
+
+ if not isinstance(key, bytes):
+ key = key.encode("latin-1", "strict")
+ if not isinstance(value, bytes):
+ value = value.encode("utf-8", "strict")
+ if not isinstance(lang, bytes):
+ lang = lang.encode("utf-8", "strict")
+ if not isinstance(tkey, bytes):
+ tkey = tkey.encode("utf-8", "strict")
+
+ if zip:
+ self.add(
+ b"iTXt",
+ key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + zlib.compress(value),
+ )
+ else:
+ self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + value)
+
+ def add_text(self, key, value, zip=False):
+ """Appends a text chunk.
+
+ :param key: latin-1 encodable text key name
+ :param value: value for this key, text or an
+ :py:class:`PIL.PngImagePlugin.iTXt` instance
+ :param zip: compression flag
+
+ """
+ if isinstance(value, iTXt):
+ return self.add_itxt(key, value, value.lang, value.tkey, zip=zip)
+
+ # The tEXt chunk stores latin-1 text
+ if not isinstance(value, bytes):
+ try:
+ value = value.encode("latin-1", "strict")
+ except UnicodeError:
+ return self.add_itxt(key, value, zip=zip)
+
+ if not isinstance(key, bytes):
+ key = key.encode("latin-1", "strict")
+
+ if zip:
+ self.add(b"zTXt", key + b"\0\0" + zlib.compress(value))
+ else:
+ self.add(b"tEXt", key + b"\0" + value)
+
+
+# --------------------------------------------------------------------
+# PNG image stream (IHDR/IEND)
+
+
+class PngStream(ChunkStream):
+ def __init__(self, fp):
+ super().__init__(fp)
+
+ # local copies of Image attributes
+ self.im_info = {}
+ self.im_text = {}
+ self.im_size = (0, 0)
+ self.im_mode = None
+ self.im_tile = None
+ self.im_palette = None
+ self.im_custom_mimetype = None
+ self.im_n_frames = None
+ self._seq_num = None
+ self.rewind_state = None
+
+ self.text_memory = 0
+
+ def check_text_memory(self, chunklen):
+ self.text_memory += chunklen
+ if self.text_memory > MAX_TEXT_MEMORY:
+ msg = (
+ "Too much memory used in text chunks: "
+ f"{self.text_memory}>MAX_TEXT_MEMORY"
+ )
+ raise ValueError(msg)
+
+ def save_rewind(self):
+ self.rewind_state = {
+ "info": self.im_info.copy(),
+ "tile": self.im_tile,
+ "seq_num": self._seq_num,
+ }
+
+ def rewind(self):
+ self.im_info = self.rewind_state["info"]
+ self.im_tile = self.rewind_state["tile"]
+ self._seq_num = self.rewind_state["seq_num"]
+
+ def chunk_iCCP(self, pos, length):
+ # ICC profile
+ s = ImageFile._safe_read(self.fp, length)
+ # according to PNG spec, the iCCP chunk contains:
+ # Profile name 1-79 bytes (character string)
+ # Null separator 1 byte (null character)
+ # Compression method 1 byte (0)
+ # Compressed profile n bytes (zlib with deflate compression)
+ i = s.find(b"\0")
+ logger.debug("iCCP profile name %r", s[:i])
+ logger.debug("Compression method %s", s[i])
+ comp_method = s[i]
+ if comp_method != 0:
+ msg = f"Unknown compression method {comp_method} in iCCP chunk"
+ raise SyntaxError(msg)
+ try:
+ icc_profile = _safe_zlib_decompress(s[i + 2 :])
+ except ValueError:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ icc_profile = None
+ else:
+ raise
+ except zlib.error:
+ icc_profile = None # FIXME
+ self.im_info["icc_profile"] = icc_profile
+ return s
+
+ def chunk_IHDR(self, pos, length):
+ # image header
+ s = ImageFile._safe_read(self.fp, length)
+ if length < 13:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ return s
+ msg = "Truncated IHDR chunk"
+ raise ValueError(msg)
+ self.im_size = i32(s, 0), i32(s, 4)
+ try:
+ self.im_mode, self.im_rawmode = _MODES[(s[8], s[9])]
+ except Exception:
+ pass
+ if s[12]:
+ self.im_info["interlace"] = 1
+ if s[11]:
+ msg = "unknown filter category"
+ raise SyntaxError(msg)
+ return s
+
+ def chunk_IDAT(self, pos, length):
+ # image data
+ if "bbox" in self.im_info:
+ tile = [("zip", self.im_info["bbox"], pos, self.im_rawmode)]
+ else:
+ if self.im_n_frames is not None:
+ self.im_info["default_image"] = True
+ tile = [("zip", (0, 0) + self.im_size, pos, self.im_rawmode)]
+ self.im_tile = tile
+ self.im_idat = length
+ msg = "image data found"
+ raise EOFError(msg)
+
+ def chunk_IEND(self, pos, length):
+ msg = "end of PNG image"
+ raise EOFError(msg)
+
+ def chunk_PLTE(self, pos, length):
+ # palette
+ s = ImageFile._safe_read(self.fp, length)
+ if self.im_mode == "P":
+ self.im_palette = "RGB", s
+ return s
+
+ def chunk_tRNS(self, pos, length):
+ # transparency
+ s = ImageFile._safe_read(self.fp, length)
+ if self.im_mode == "P":
+ if _simple_palette.match(s):
+ # tRNS contains only one full-transparent entry,
+ # other entries are full opaque
+ i = s.find(b"\0")
+ if i >= 0:
+ self.im_info["transparency"] = i
+ else:
+ # otherwise, we have a byte string with one alpha value
+ # for each palette entry
+ self.im_info["transparency"] = s
+ elif self.im_mode in ("1", "L", "I"):
+ self.im_info["transparency"] = i16(s)
+ elif self.im_mode == "RGB":
+ self.im_info["transparency"] = i16(s), i16(s, 2), i16(s, 4)
+ return s
+
+ def chunk_gAMA(self, pos, length):
+ # gamma setting
+ s = ImageFile._safe_read(self.fp, length)
+ self.im_info["gamma"] = i32(s) / 100000.0
+ return s
+
+ def chunk_cHRM(self, pos, length):
+ # chromaticity, 8 unsigned ints, actual value is scaled by 100,000
+ # WP x,y, Red x,y, Green x,y, Blue x,y
+
+ s = ImageFile._safe_read(self.fp, length)
+ raw_vals = struct.unpack(">%dI" % (len(s) // 4), s)
+ self.im_info["chromaticity"] = tuple(elt / 100000.0 for elt in raw_vals)
+ return s
+
+ def chunk_sRGB(self, pos, length):
+ # srgb rendering intent, 1 byte
+ # 0 perceptual
+ # 1 relative colorimetric
+ # 2 saturation
+ # 3 absolute colorimetric
+
+ s = ImageFile._safe_read(self.fp, length)
+ if length < 1:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ return s
+ msg = "Truncated sRGB chunk"
+ raise ValueError(msg)
+ self.im_info["srgb"] = s[0]
+ return s
+
+ def chunk_pHYs(self, pos, length):
+ # pixels per unit
+ s = ImageFile._safe_read(self.fp, length)
+ if length < 9:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ return s
+ msg = "Truncated pHYs chunk"
+ raise ValueError(msg)
+ px, py = i32(s, 0), i32(s, 4)
+ unit = s[8]
+ if unit == 1: # meter
+ dpi = px * 0.0254, py * 0.0254
+ self.im_info["dpi"] = dpi
+ elif unit == 0:
+ self.im_info["aspect"] = px, py
+ return s
+
+ def chunk_tEXt(self, pos, length):
+ # text
+ s = ImageFile._safe_read(self.fp, length)
+ try:
+ k, v = s.split(b"\0", 1)
+ except ValueError:
+ # fallback for broken tEXt tags
+ k = s
+ v = b""
+ if k:
+ k = k.decode("latin-1", "strict")
+ v_str = v.decode("latin-1", "replace")
+
+ self.im_info[k] = v if k == "exif" else v_str
+ self.im_text[k] = v_str
+ self.check_text_memory(len(v_str))
+
+ return s
+
+ def chunk_zTXt(self, pos, length):
+ # compressed text
+ s = ImageFile._safe_read(self.fp, length)
+ try:
+ k, v = s.split(b"\0", 1)
+ except ValueError:
+ k = s
+ v = b""
+ if v:
+ comp_method = v[0]
+ else:
+ comp_method = 0
+ if comp_method != 0:
+ msg = f"Unknown compression method {comp_method} in zTXt chunk"
+ raise SyntaxError(msg)
+ try:
+ v = _safe_zlib_decompress(v[1:])
+ except ValueError:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ v = b""
+ else:
+ raise
+ except zlib.error:
+ v = b""
+
+ if k:
+ k = k.decode("latin-1", "strict")
+ v = v.decode("latin-1", "replace")
+
+ self.im_info[k] = self.im_text[k] = v
+ self.check_text_memory(len(v))
+
+ return s
+
+ def chunk_iTXt(self, pos, length):
+ # international text
+ r = s = ImageFile._safe_read(self.fp, length)
+ try:
+ k, r = r.split(b"\0", 1)
+ except ValueError:
+ return s
+ if len(r) < 2:
+ return s
+ cf, cm, r = r[0], r[1], r[2:]
+ try:
+ lang, tk, v = r.split(b"\0", 2)
+ except ValueError:
+ return s
+ if cf != 0:
+ if cm == 0:
+ try:
+ v = _safe_zlib_decompress(v)
+ except ValueError:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ return s
+ else:
+ raise
+ except zlib.error:
+ return s
+ else:
+ return s
+ try:
+ k = k.decode("latin-1", "strict")
+ lang = lang.decode("utf-8", "strict")
+ tk = tk.decode("utf-8", "strict")
+ v = v.decode("utf-8", "strict")
+ except UnicodeError:
+ return s
+
+ self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk)
+ self.check_text_memory(len(v))
+
+ return s
+
+ def chunk_eXIf(self, pos, length):
+ s = ImageFile._safe_read(self.fp, length)
+ self.im_info["exif"] = b"Exif\x00\x00" + s
+ return s
+
+ # APNG chunks
+ def chunk_acTL(self, pos, length):
+ s = ImageFile._safe_read(self.fp, length)
+ if length < 8:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ return s
+ msg = "APNG contains truncated acTL chunk"
+ raise ValueError(msg)
+ if self.im_n_frames is not None:
+ self.im_n_frames = None
+ warnings.warn("Invalid APNG, will use default PNG image if possible")
+ return s
+ n_frames = i32(s)
+ if n_frames == 0 or n_frames > 0x80000000:
+ warnings.warn("Invalid APNG, will use default PNG image if possible")
+ return s
+ self.im_n_frames = n_frames
+ self.im_info["loop"] = i32(s, 4)
+ self.im_custom_mimetype = "image/apng"
+ return s
+
+ def chunk_fcTL(self, pos, length):
+ s = ImageFile._safe_read(self.fp, length)
+ if length < 26:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ return s
+ msg = "APNG contains truncated fcTL chunk"
+ raise ValueError(msg)
+ seq = i32(s)
+ if (self._seq_num is None and seq != 0) or (
+ self._seq_num is not None and self._seq_num != seq - 1
+ ):
+ msg = "APNG contains frame sequence errors"
+ raise SyntaxError(msg)
+ self._seq_num = seq
+ width, height = i32(s, 4), i32(s, 8)
+ px, py = i32(s, 12), i32(s, 16)
+ im_w, im_h = self.im_size
+ if px + width > im_w or py + height > im_h:
+ msg = "APNG contains invalid frames"
+ raise SyntaxError(msg)
+ self.im_info["bbox"] = (px, py, px + width, py + height)
+ delay_num, delay_den = i16(s, 20), i16(s, 22)
+ if delay_den == 0:
+ delay_den = 100
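+ # frame delay is delay_num/delay_den seconds, stored as milliseconds;
+ # per the APNG spec a zero denominator is read as 100 (so 10/100 s -> 100 ms)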
+ self.im_info["duration"] = float(delay_num) / float(delay_den) * 1000
+ self.im_info["disposal"] = s[24]
+ self.im_info["blend"] = s[25]
+ return s
+
+ def chunk_fdAT(self, pos, length):
+ if length < 4:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ s = ImageFile._safe_read(self.fp, length)
+ return s
+ msg = "APNG contains truncated fdAT chunk"
+ raise ValueError(msg)
+ s = ImageFile._safe_read(self.fp, 4)
+ seq = i32(s)
+ if self._seq_num != seq - 1:
+ msg = "APNG contains frame sequence errors"
+ raise SyntaxError(msg)
+ self._seq_num = seq
+ return self.chunk_IDAT(pos + 4, length - 4)
+
+
+# --------------------------------------------------------------------
+# PNG reader
+
+
+def _accept(prefix):
+ return prefix[:8] == _MAGIC
+
+
+##
+# Image plugin for PNG images.
+
+
+class PngImageFile(ImageFile.ImageFile):
+ format = "PNG"
+ format_description = "Portable network graphics"
+
+ def _open(self):
+ if not _accept(self.fp.read(8)):
+ msg = "not a PNG file"
+ raise SyntaxError(msg)
+ self._fp = self.fp
+ self.__frame = 0
+
+ #
+ # Parse headers up to the first IDAT or fDAT chunk
+
+ self.private_chunks = []
+ self.png = PngStream(self.fp)
+
+ while True:
+ #
+ # get next chunk
+
+ cid, pos, length = self.png.read()
+
+ try:
+ s = self.png.call(cid, pos, length)
+ except EOFError:
+ break
+ except AttributeError:
+ logger.debug("%r %s %s (unknown)", cid, pos, length)
+ s = ImageFile._safe_read(self.fp, length)
+ if cid[1:2].islower():
+ self.private_chunks.append((cid, s))
+
+ self.png.crc(cid, s)
+
+ #
+ # Copy relevant attributes from the PngStream. An alternative
+ # would be to let the PngStream class modify these attributes
+ # directly, but that introduces circular references which are
+ # difficult to break if things go wrong in the decoder...
+ # (believe me, I've tried ;-)
+
+ self._mode = self.png.im_mode
+ self._size = self.png.im_size
+ self.info = self.png.im_info
+ self._text = None
+ self.tile = self.png.im_tile
+ self.custom_mimetype = self.png.im_custom_mimetype
+ self.n_frames = self.png.im_n_frames or 1
+ self.default_image = self.info.get("default_image", False)
+
+ if self.png.im_palette:
+ rawmode, data = self.png.im_palette
+ self.palette = ImagePalette.raw(rawmode, data)
+
+ if cid == b"fdAT":
+ self.__prepare_idat = length - 4
+ else:
+ self.__prepare_idat = length # used by load_prepare()
+
+ if self.png.im_n_frames is not None:
+ self._close_exclusive_fp_after_loading = False
+ self.png.save_rewind()
+ self.__rewind_idat = self.__prepare_idat
+ self.__rewind = self._fp.tell()
+ if self.default_image:
+ # IDAT chunk contains default image and not first animation frame
+ self.n_frames += 1
+ self._seek(0)
+ self.is_animated = self.n_frames > 1
+
+ @property
+ def text(self):
+ # experimental
+ if self._text is None:
+ # iTXt, tEXt and zTXt chunks may appear at the end of the file
+ # So load the file to ensure that they are read
+ if self.is_animated:
+ frame = self.__frame
+ # for APNG, seek to the final frame before loading
+ self.seek(self.n_frames - 1)
+ self.load()
+ if self.is_animated:
+ self.seek(frame)
+ return self._text
+
+ def verify(self):
+ """Verify PNG file"""
+
+ if self.fp is None:
+ msg = "verify must be called directly after open"
+ raise RuntimeError(msg)
+
+ # back up to beginning of IDAT block
+ self.fp.seek(self.tile[0][2] - 8)
+
+ self.png.verify()
+ self.png.close()
+
+ if self._exclusive_fp:
+ self.fp.close()
+ self.fp = None
+
+ def seek(self, frame):
+ if not self._seek_check(frame):
+ return
+ if frame < self.__frame:
+ self._seek(0, True)
+
+ last_frame = self.__frame
+ for f in range(self.__frame + 1, frame + 1):
+ try:
+ self._seek(f)
+ except EOFError as e:
+ self.seek(last_frame)
+ msg = "no more images in APNG file"
+ raise EOFError(msg) from e
+
+ def _seek(self, frame, rewind=False):
+ if frame == 0:
+ if rewind:
+ self._fp.seek(self.__rewind)
+ self.png.rewind()
+ self.__prepare_idat = self.__rewind_idat
+ self.im = None
+ if self.pyaccess:
+ self.pyaccess = None
+ self.info = self.png.im_info
+ self.tile = self.png.im_tile
+ self.fp = self._fp
+ self._prev_im = None
+ self.dispose = None
+ self.default_image = self.info.get("default_image", False)
+ self.dispose_op = self.info.get("disposal")
+ self.blend_op = self.info.get("blend")
+ self.dispose_extent = self.info.get("bbox")
+ self.__frame = 0
+ else:
+ if frame != self.__frame + 1:
+ msg = f"cannot seek to frame {frame}"
+ raise ValueError(msg)
+
+ # ensure previous frame was loaded
+ self.load()
+
+ if self.dispose:
+ self.im.paste(self.dispose, self.dispose_extent)
+ self._prev_im = self.im.copy()
+
+ self.fp = self._fp
+
+ # advance to the next frame
+ if self.__prepare_idat:
+ ImageFile._safe_read(self.fp, self.__prepare_idat)
+ self.__prepare_idat = 0
+ frame_start = False
+ while True:
+ self.fp.read(4) # CRC
+
+ try:
+ cid, pos, length = self.png.read()
+ except (struct.error, SyntaxError):
+ break
+
+ if cid == b"IEND":
+ msg = "No more images in APNG file"
+ raise EOFError(msg)
+ if cid == b"fcTL":
+ if frame_start:
+ # there must be at least one fdAT chunk between fcTL chunks
+ msg = "APNG missing frame data"
+ raise SyntaxError(msg)
+ frame_start = True
+
+ try:
+ self.png.call(cid, pos, length)
+ except UnicodeDecodeError:
+ break
+ except EOFError:
+ if cid == b"fdAT":
+ length -= 4
+ if frame_start:
+ self.__prepare_idat = length
+ break
+ ImageFile._safe_read(self.fp, length)
+ except AttributeError:
+ logger.debug("%r %s %s (unknown)", cid, pos, length)
+ ImageFile._safe_read(self.fp, length)
+
+ self.__frame = frame
+ self.tile = self.png.im_tile
+ self.dispose_op = self.info.get("disposal")
+ self.blend_op = self.info.get("blend")
+ self.dispose_extent = self.info.get("bbox")
+
+ if not self.tile:
+ msg = "image not found in APNG frame"
+ raise EOFError(msg)
+
+ # setup frame disposal (actual disposal done when needed in the next _seek())
+ if self._prev_im is None and self.dispose_op == Disposal.OP_PREVIOUS:
+ self.dispose_op = Disposal.OP_BACKGROUND
+
+ if self.dispose_op == Disposal.OP_PREVIOUS:
+ self.dispose = self._prev_im.copy()
+ self.dispose = self._crop(self.dispose, self.dispose_extent)
+ elif self.dispose_op == Disposal.OP_BACKGROUND:
+ self.dispose = Image.core.fill(self.mode, self.size)
+ self.dispose = self._crop(self.dispose, self.dispose_extent)
+ else:
+ self.dispose = None
+
+ def tell(self):
+ return self.__frame
+
+ def load_prepare(self):
+ """internal: prepare to read PNG file"""
+
+ if self.info.get("interlace"):
+ self.decoderconfig = self.decoderconfig + (1,)
+
+ self.__idat = self.__prepare_idat # used by load_read()
+ ImageFile.ImageFile.load_prepare(self)
+
+ def load_read(self, read_bytes):
+ """internal: read more image data"""
+
+ while self.__idat == 0:
+ # end of chunk, skip forward to next one
+
+ self.fp.read(4) # CRC
+
+ cid, pos, length = self.png.read()
+
+ if cid not in [b"IDAT", b"DDAT", b"fdAT"]:
+ self.png.push(cid, pos, length)
+ return b""
+
+ if cid == b"fdAT":
+ try:
+ self.png.call(cid, pos, length)
+ except EOFError:
+ pass
+ self.__idat = length - 4 # sequence_num has already been read
+ else:
+ self.__idat = length # empty chunks are allowed
+
+ # read more data from this chunk
+ if read_bytes <= 0:
+ read_bytes = self.__idat
+ else:
+ read_bytes = min(read_bytes, self.__idat)
+
+ self.__idat = self.__idat - read_bytes
+
+ return self.fp.read(read_bytes)
+
+ def load_end(self):
+ """internal: finished reading image data"""
+ if self.__idat != 0:
+ self.fp.read(self.__idat)
+ while True:
+ self.fp.read(4) # CRC
+
+ try:
+ cid, pos, length = self.png.read()
+ except (struct.error, SyntaxError):
+ break
+
+ if cid == b"IEND":
+ break
+ elif cid == b"fcTL" and self.is_animated:
+ # start of the next frame, stop reading
+ self.__prepare_idat = 0
+ self.png.push(cid, pos, length)
+ break
+
+ try:
+ self.png.call(cid, pos, length)
+ except UnicodeDecodeError:
+ break
+ except EOFError:
+ if cid == b"fdAT":
+ length -= 4
+ ImageFile._safe_read(self.fp, length)
+ except AttributeError:
+ logger.debug("%r %s %s (unknown)", cid, pos, length)
+ s = ImageFile._safe_read(self.fp, length)
+ if cid[1:2].islower():
+ self.private_chunks.append((cid, s, True))
+ self._text = self.png.im_text
+ if not self.is_animated:
+ self.png.close()
+ self.png = None
+ else:
+ if self._prev_im and self.blend_op == Blend.OP_OVER:
+ updated = self._crop(self.im, self.dispose_extent)
+ if self.im.mode == "RGB" and "transparency" in self.info:
+ mask = updated.convert_transparent(
+ "RGBA", self.info["transparency"]
+ )
+ else:
+ mask = updated.convert("RGBA")
+ self._prev_im.paste(updated, self.dispose_extent, mask)
+ self.im = self._prev_im
+ if self.pyaccess:
+ self.pyaccess = None
+
+ def _getexif(self):
+ if "exif" not in self.info:
+ self.load()
+ if "exif" not in self.info and "Raw profile type exif" not in self.info:
+ return None
+ return self.getexif()._get_merged_dict()
+
+ def getexif(self):
+ if "exif" not in self.info:
+ self.load()
+
+ return super().getexif()
+
+ def getxmp(self):
+ """
+ Returns a dictionary containing the XMP tags.
+ Requires defusedxml to be installed.
+
+ :returns: XMP tags in a dictionary.
+ """
+ return (
+ self._getxmp(self.info["XML:com.adobe.xmp"])
+ if "XML:com.adobe.xmp" in self.info
+ else {}
+ )
+
+
+# --------------------------------------------------------------------
+# PNG writer
+
+_OUTMODES = {
+ # supported PIL modes, and the corresponding rawmode and PNG bit depth/color type bytes
+ "1": ("1", b"\x01\x00"),
+ "L;1": ("L;1", b"\x01\x00"),
+ "L;2": ("L;2", b"\x02\x00"),
+ "L;4": ("L;4", b"\x04\x00"),
+ "L": ("L", b"\x08\x00"),
+ "LA": ("LA", b"\x08\x04"),
+ "I": ("I;16B", b"\x10\x00"),
+ "I;16": ("I;16B", b"\x10\x00"),
+ "I;16B": ("I;16B", b"\x10\x00"),
+ "P;1": ("P;1", b"\x01\x03"),
+ "P;2": ("P;2", b"\x02\x03"),
+ "P;4": ("P;4", b"\x04\x03"),
+ "P": ("P", b"\x08\x03"),
+ "RGB": ("RGB", b"\x08\x02"),
+ "RGBA": ("RGBA", b"\x08\x06"),
+}
+
+
+def putchunk(fp, cid, *data):
+ """Write a PNG chunk (including CRC field)"""
+
+ data = b"".join(data)
+
+ fp.write(o32(len(data)) + cid)
+ fp.write(data)
+ crc = _crc32(data, _crc32(cid))
+ fp.write(o32(crc))
+
+
+class _idat:
+ # wrap output from the encoder in IDAT chunks
+
+ def __init__(self, fp, chunk):
+ self.fp = fp
+ self.chunk = chunk
+
+ def write(self, data):
+ self.chunk(self.fp, b"IDAT", data)
+
+
+class _fdat:
+ # wrap encoder output in fdAT chunks
+
+ def __init__(self, fp, chunk, seq_num):
+ self.fp = fp
+ self.chunk = chunk
+ self.seq_num = seq_num
+
+ def write(self, data):
+ self.chunk(self.fp, b"fdAT", o32(self.seq_num), data)
+ self.seq_num += 1
+
+
+def _write_multiple_frames(im, fp, chunk, rawmode, default_image, append_images):
+ duration = im.encoderinfo.get("duration", im.info.get("duration", 0))
+ loop = im.encoderinfo.get("loop", im.info.get("loop", 0))
+ disposal = im.encoderinfo.get("disposal", im.info.get("disposal", Disposal.OP_NONE))
+ blend = im.encoderinfo.get("blend", im.info.get("blend", Blend.OP_SOURCE))
+
+ if default_image:
+ chain = itertools.chain(append_images)
+ else:
+ chain = itertools.chain([im], append_images)
+
+ im_frames = []
+ frame_count = 0
+ for im_seq in chain:
+ for im_frame in ImageSequence.Iterator(im_seq):
+ if im_frame.mode == rawmode:
+ im_frame = im_frame.copy()
+ else:
+ im_frame = im_frame.convert(rawmode)
+ encoderinfo = im.encoderinfo.copy()
+ if isinstance(duration, (list, tuple)):
+ encoderinfo["duration"] = duration[frame_count]
+ if isinstance(disposal, (list, tuple)):
+ encoderinfo["disposal"] = disposal[frame_count]
+ if isinstance(blend, (list, tuple)):
+ encoderinfo["blend"] = blend[frame_count]
+ frame_count += 1
+
+ if im_frames:
+ previous = im_frames[-1]
+ prev_disposal = previous["encoderinfo"].get("disposal")
+ prev_blend = previous["encoderinfo"].get("blend")
+ if prev_disposal == Disposal.OP_PREVIOUS and len(im_frames) < 2:
+ prev_disposal = Disposal.OP_BACKGROUND
+
+ if prev_disposal == Disposal.OP_BACKGROUND:
+ base_im = previous["im"].copy()
+ dispose = Image.core.fill("RGBA", im.size, (0, 0, 0, 0))
+ bbox = previous["bbox"]
+ if bbox:
+ dispose = dispose.crop(bbox)
+ else:
+ bbox = (0, 0) + im.size
+ base_im.paste(dispose, bbox)
+ elif prev_disposal == Disposal.OP_PREVIOUS:
+ base_im = im_frames[-2]["im"]
+ else:
+ base_im = previous["im"]
+ delta = ImageChops.subtract_modulo(
+ im_frame.convert("RGBA"), base_im.convert("RGBA")
+ )
+ bbox = delta.getbbox(alpha_only=False)
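+ # an empty bbox means this frame is identical to the previous
+ # output; if disposal and blend also match, merge the two frames
+ # by extending the previous frame's duration instead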
+ if (
+ not bbox
+ and prev_disposal == encoderinfo.get("disposal")
+ and prev_blend == encoderinfo.get("blend")
+ ):
+ previous["encoderinfo"]["duration"] += encoderinfo.get(
+ "duration", duration
+ )
+ continue
+ else:
+ bbox = None
+ if "duration" not in encoderinfo:
+ encoderinfo["duration"] = duration
+ im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo})
+
+ if len(im_frames) == 1 and not default_image:
+ return im_frames[0]["im"]
+
+ # animation control
+ chunk(
+ fp,
+ b"acTL",
+ o32(len(im_frames)), # 0: num_frames
+ o32(loop), # 4: num_plays
+ )
+
+ # default image IDAT (if it exists)
+ if default_image:
+ if im.mode != rawmode:
+ im = im.convert(rawmode)
+ ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)])
+
+ seq_num = 0
+ for frame, frame_data in enumerate(im_frames):
+ im_frame = frame_data["im"]
+ if not frame_data["bbox"]:
+ bbox = (0, 0) + im_frame.size
+ else:
+ bbox = frame_data["bbox"]
+ im_frame = im_frame.crop(bbox)
+ size = im_frame.size
+ encoderinfo = frame_data["encoderinfo"]
+ frame_duration = int(round(encoderinfo["duration"]))
+ frame_disposal = encoderinfo.get("disposal", disposal)
+ frame_blend = encoderinfo.get("blend", blend)
+ # frame control
+ chunk(
+ fp,
+ b"fcTL",
+ o32(seq_num), # sequence_number
+ o32(size[0]), # width
+ o32(size[1]), # height
+ o32(bbox[0]), # x_offset
+ o32(bbox[1]), # y_offset
+ o16(frame_duration), # delay_numerator
+ o16(1000), # delay_denominator
+ o8(frame_disposal), # dispose_op
+ o8(frame_blend), # blend_op
+ )
+ seq_num += 1
+ # frame data
+ if frame == 0 and not default_image:
+ # first frame must be in IDAT chunks for backwards compatibility
+ ImageFile._save(
+ im_frame,
+ _idat(fp, chunk),
+ [("zip", (0, 0) + im_frame.size, 0, rawmode)],
+ )
+ else:
+ fdat_chunks = _fdat(fp, chunk, seq_num)
+ ImageFile._save(
+ im_frame,
+ fdat_chunks,
+ [("zip", (0, 0) + im_frame.size, 0, rawmode)],
+ )
+ seq_num = fdat_chunks.seq_num
+
+
+def _save_all(im, fp, filename):
+ _save(im, fp, filename, save_all=True)
+
+
+def _save(im, fp, filename, chunk=putchunk, save_all=False):
+ # save an image to disk (called by the save method)
+
+ if save_all:
+ default_image = im.encoderinfo.get(
+ "default_image", im.info.get("default_image")
+ )
+ modes = set()
+ append_images = im.encoderinfo.get("append_images", [])
+ for im_seq in itertools.chain([im], append_images):
+ for im_frame in ImageSequence.Iterator(im_seq):
+ modes.add(im_frame.mode)
+ for mode in ("RGBA", "RGB", "P"):
+ if mode in modes:
+ break
+ else:
+ mode = modes.pop()
+ else:
+ mode = im.mode
+
+ if mode == "P":
+ #
+ # attempt to minimize storage requirements for palette images
+ if "bits" in im.encoderinfo:
+ # number of bits specified by user
+ colors = min(1 << im.encoderinfo["bits"], 256)
+ else:
+ # check palette contents
+ if im.palette:
+ colors = max(min(len(im.palette.getdata()[1]) // 3, 256), 1)
+ else:
+ colors = 256
+
+ if colors <= 16:
+ if colors <= 2:
+ bits = 1
+ elif colors <= 4:
+ bits = 2
+ else:
+ bits = 4
+ mode = f"{mode};{bits}"
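+ # e.g. a 10-color palette needs bits=4, so "P" becomes "P;4"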
+
+ # encoder options
+ im.encoderconfig = (
+ im.encoderinfo.get("optimize", False),
+ im.encoderinfo.get("compress_level", -1),
+ im.encoderinfo.get("compress_type", -1),
+ im.encoderinfo.get("dictionary", b""),
+ )
+
+ # get the corresponding PNG mode
+ try:
+ rawmode, mode = _OUTMODES[mode]
+ except KeyError as e:
+ msg = f"cannot write mode {mode} as PNG"
+ raise OSError(msg) from e
+
+ #
+ # write minimal PNG file
+
+ fp.write(_MAGIC)
+
+ chunk(
+ fp,
+ b"IHDR",
+ o32(im.size[0]), # 0: size
+ o32(im.size[1]),
+ mode, # 8: depth/type
+ b"\0", # 10: compression
+ b"\0", # 11: filter category
+ b"\0", # 12: interlace flag
+ )
+
+ chunks = [b"cHRM", b"gAMA", b"sBIT", b"sRGB", b"tIME"]
+
+ icc = im.encoderinfo.get("icc_profile", im.info.get("icc_profile"))
+ if icc:
+ # ICC profile
+ # according to PNG spec, the iCCP chunk contains:
+ # Profile name 1-79 bytes (character string)
+ # Null separator 1 byte (null character)
+ # Compression method 1 byte (0)
+ # Compressed profile n bytes (zlib with deflate compression)
+ name = b"ICC Profile"
+ data = name + b"\0\0" + zlib.compress(icc)
+ chunk(fp, b"iCCP", data)
+
+ # You must either have sRGB or iCCP.
+ # Disallow sRGB chunks when an iCCP-chunk has been emitted.
+ chunks.remove(b"sRGB")
+
+ info = im.encoderinfo.get("pnginfo")
+ if info:
+ chunks_multiple_allowed = [b"sPLT", b"iTXt", b"tEXt", b"zTXt"]
+ for info_chunk in info.chunks:
+ cid, data = info_chunk[:2]
+ if cid in chunks:
+ chunks.remove(cid)
+ chunk(fp, cid, data)
+ elif cid in chunks_multiple_allowed:
+ chunk(fp, cid, data)
+ elif cid[1:2].islower():
+ # Private chunk
+ after_idat = info_chunk[2:3]
+ if not after_idat:
+ chunk(fp, cid, data)
+
+ if im.mode == "P":
+ palette_byte_number = colors * 3
+ palette_bytes = im.im.getpalette("RGB")[:palette_byte_number]
+ while len(palette_bytes) < palette_byte_number:
+ palette_bytes += b"\0"
+ chunk(fp, b"PLTE", palette_bytes)
+
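+# Example (illustrative sketch): attaching text metadata at save time.
+#
+#     from PIL import Image
+#     from PIL.PngImagePlugin import PngInfo
+#
+#     meta = PngInfo()
+#     meta.add_text("Author", "Jane Doe")  # plain tEXt chunk
+#     meta.add_text("Comment", "a longer note", zip=True)  # zTXt chunk
+#     meta.add_itxt("Title", "Grüße", lang="de", tkey="Titel")  # iTXt chunk
+#     Image.new("RGB", (1, 1)).save("out.png", pnginfo=meta)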
+ transparency = im.encoderinfo.get("transparency", im.info.get("transparency", None))
+
+ if transparency or transparency == 0:
+ if im.mode == "P":
+ # limit to actual palette size
+ alpha_bytes = colors
+ if isinstance(transparency, bytes):
+ chunk(fp, b"tRNS", transparency[:alpha_bytes])
+ else:
+ transparency = max(0, min(255, transparency))
+ alpha = b"\xFF" * transparency + b"\0"
+ chunk(fp, b"tRNS", alpha[:alpha_bytes])
+ elif im.mode in ("1", "L", "I"):
+ transparency = max(0, min(65535, transparency))
+ chunk(fp, b"tRNS", o16(transparency))
+ elif im.mode == "RGB":
+ red, green, blue = transparency
+ chunk(fp, b"tRNS", o16(red) + o16(green) + o16(blue))
+ else:
+ if "transparency" in im.encoderinfo:
+ # don't bother with transparency if it's an RGBA
+ # and it's in the info dict. It's probably just stale.
+ msg = "cannot use transparency for this mode"
+ raise OSError(msg)
+ else:
+ if im.mode == "P" and im.im.getpalettemode() == "RGBA":
+ alpha = im.im.getpalette("RGBA", "A")
+ alpha_bytes = colors
+ chunk(fp, b"tRNS", alpha[:alpha_bytes])
+
+ dpi = im.encoderinfo.get("dpi")
+ if dpi:
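+ # pHYs stores pixels per metre; convert from dots per inch
+ # (1 inch = 0.0254 m), rounding to the nearest integer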
+ chunk(
+ fp,
+ b"pHYs",
+ o32(int(dpi[0] / 0.0254 + 0.5)),
+ o32(int(dpi[1] / 0.0254 + 0.5)),
+ b"\x01",
+ )
+
+ if info:
+ chunks = [b"bKGD", b"hIST"]
+ for info_chunk in info.chunks:
+ cid, data = info_chunk[:2]
+ if cid in chunks:
+ chunks.remove(cid)
+ chunk(fp, cid, data)
+
+ exif = im.encoderinfo.get("exif")
+ if exif:
+ if isinstance(exif, Image.Exif):
+ exif = exif.tobytes(8)
+ if exif.startswith(b"Exif\x00\x00"):
+ exif = exif[6:]
+ chunk(fp, b"eXIf", exif)
+
+ if save_all:
+ im = _write_multiple_frames(
+ im, fp, chunk, rawmode, default_image, append_images
+ )
+ if im:
+ ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)])
+
+ if info:
+ for info_chunk in info.chunks:
+ cid, data = info_chunk[:2]
+ if cid[1:2].islower():
+ # Private chunk
+ after_idat = info_chunk[2:3]
+ if after_idat:
+ chunk(fp, cid, data)
+
+ chunk(fp, b"IEND", b"")
+
+ if hasattr(fp, "flush"):
+ fp.flush()
+
+
+# --------------------------------------------------------------------
+# PNG chunk converter
+
+
+def getchunks(im, **params):
+ """Return a list of PNG chunks representing this image."""
+
+ class collector:
+ data = []
+
+ def write(self, data):
+ pass
+
+ def append(self, chunk):
+ self.data.append(chunk)
+
+ def append(fp, cid, *data):
+ data = b"".join(data)
+ crc = o32(_crc32(data, _crc32(cid)))
+ fp.append((cid, data, crc))
+
+ fp = collector()
+
+ try:
+ im.encoderinfo = params
+ _save(im, fp, None, append)
+ finally:
+ del im.encoderinfo
+
+ return fp.data
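+
+# Example (illustrative sketch): inspecting the chunks Pillow would emit for
+# an image, without writing a file.
+#
+#     from PIL import Image
+#     from PIL.PngImagePlugin import getchunks
+#
+#     im = Image.new("RGB", (8, 8))
+#     for cid, data, crc in getchunks(im):
+#         print(cid, len(data))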
+
+
+# --------------------------------------------------------------------
+# Registry
+
+Image.register_open(PngImageFile.format, PngImageFile, _accept)
+Image.register_save(PngImageFile.format, _save)
+Image.register_save_all(PngImageFile.format, _save_all)
+
+Image.register_extensions(PngImageFile.format, [".png", ".apng"])
+
+Image.register_mime(PngImageFile.format, "image/png")
diff --git a/Lib/site-packages/PIL/PpmImagePlugin.py b/Lib/site-packages/PIL/PpmImagePlugin.py
new file mode 100644
index 0000000..25dbfa5
--- /dev/null
+++ b/Lib/site-packages/PIL/PpmImagePlugin.py
@@ -0,0 +1,344 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PPM support for PIL
+#
+# History:
+# 96-03-24 fl Created
+# 98-03-06 fl Write RGBA images (as RGB, that is)
+#
+# Copyright (c) Secret Labs AB 1997-98.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from . import Image, ImageFile
+from ._binary import i16be as i16
+from ._binary import o8
+from ._binary import o32le as o32
+
+#
+# --------------------------------------------------------------------
+
+b_whitespace = b"\x20\x09\x0a\x0b\x0c\x0d"
+
+MODES = {
+ # standard
+ b"P1": "1",
+ b"P2": "L",
+ b"P3": "RGB",
+ b"P4": "1",
+ b"P5": "L",
+ b"P6": "RGB",
+ # extensions
+ b"P0CMYK": "CMYK",
+ # PIL extensions (for test purposes only)
+ b"PyP": "P",
+ b"PyRGBA": "RGBA",
+ b"PyCMYK": "CMYK",
+}
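+
+# For illustration, a plain-format (P3, ASCII RGB) file begins with a header
+# like
+#
+#     P3
+#     # optional comment
+#     4 4
+#     255
+#
+# followed by whitespace-separated sample values (parsed by _read_token below).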
+
+
+def _accept(prefix):
+ return prefix[0:1] == b"P" and prefix[1] in b"0123456y"
+
+
+##
+# Image plugin for PBM, PGM, and PPM images.
+
+
+class PpmImageFile(ImageFile.ImageFile):
+ format = "PPM"
+ format_description = "Pbmplus image"
+
+ def _read_magic(self):
+ magic = b""
+ # read until whitespace or longest available magic number
+ for _ in range(6):
+ c = self.fp.read(1)
+ if not c or c in b_whitespace:
+ break
+ magic += c
+ return magic
+
+ def _read_token(self):
+ token = b""
+ while len(token) <= 10: # read until next whitespace or limit of 10 characters
+ c = self.fp.read(1)
+ if not c:
+ break
+ elif c in b_whitespace: # token ended
+ if not token:
+ # skip whitespace at start
+ continue
+ break
+ elif c == b"#":
+ # ignores rest of the line; stops at CR, LF or EOF
+ while self.fp.read(1) not in b"\r\n":
+ pass
+ continue
+ token += c
+ if not token:
+ # Token was not even 1 byte
+ msg = "Reached EOF while reading header"
+ raise ValueError(msg)
+ elif len(token) > 10:
+ msg = f"Token too long in file header: {token.decode()}"
+ raise ValueError(msg)
+ return token
+
+ def _open(self):
+ magic_number = self._read_magic()
+ try:
+ mode = MODES[magic_number]
+ except KeyError:
+ msg = "not a PPM file"
+ raise SyntaxError(msg)
+
+ if magic_number in (b"P1", b"P4"):
+ self.custom_mimetype = "image/x-portable-bitmap"
+ elif magic_number in (b"P2", b"P5"):
+ self.custom_mimetype = "image/x-portable-graymap"
+ elif magic_number in (b"P3", b"P6"):
+ self.custom_mimetype = "image/x-portable-pixmap"
+
+ maxval = None
+ decoder_name = "raw"
+ if magic_number in (b"P1", b"P2", b"P3"):
+ decoder_name = "ppm_plain"
+ for ix in range(3):
+ token = int(self._read_token())
+ if ix == 0: # token is the x size
+ xsize = token
+ elif ix == 1: # token is the y size
+ ysize = token
+ if mode == "1":
+ self._mode = "1"
+ rawmode = "1;I"
+ break
+ else:
+ self._mode = rawmode = mode
+ elif ix == 2: # token is maxval
+ maxval = token
+ if not 0 < maxval < 65536:
+ msg = "maxval must be greater than 0 and less than 65536"
+ raise ValueError(msg)
+ if maxval > 255 and mode == "L":
+ self._mode = "I"
+
+ if decoder_name != "ppm_plain":
+ # If maxval matches a bit depth, use the raw decoder directly
+ if maxval == 65535 and mode == "L":
+ rawmode = "I;16B"
+ elif maxval != 255:
+ decoder_name = "ppm"
+
+ args = (rawmode, 0, 1) if decoder_name == "raw" else (rawmode, maxval)
+ self._size = xsize, ysize
+ self.tile = [(decoder_name, (0, 0, xsize, ysize), self.fp.tell(), args)]
+
+
+#
+# --------------------------------------------------------------------
+
+
+class PpmPlainDecoder(ImageFile.PyDecoder):
+ _pulls_fd = True
+
+ def _read_block(self):
+ return self.fd.read(ImageFile.SAFEBLOCK)
+
+ def _find_comment_end(self, block, start=0):
+ a = block.find(b"\n", start)
+ b = block.find(b"\r", start)
+ return min(a, b) if a * b > 0 else max(a, b) # lowest nonnegative index (or -1)
+
+ def _ignore_comments(self, block):
+ if self._comment_spans:
+ # Finish current comment
+ while block:
+ comment_end = self._find_comment_end(block)
+ if comment_end != -1:
+ # Comment ends in this block
+ # Delete tail of comment
+ block = block[comment_end + 1 :]
+ break
+ else:
+ # Comment spans whole block
+ # So read the next block, looking for the end
+ block = self._read_block()
+
+ # Search for any further comments
+ self._comment_spans = False
+ while True:
+ comment_start = block.find(b"#")
+ if comment_start == -1:
+ # No comment found
+ break
+ comment_end = self._find_comment_end(block, comment_start)
+ if comment_end != -1:
+ # Comment ends in this block
+ # Delete comment
+ block = block[:comment_start] + block[comment_end + 1 :]
+ else:
+ # Comment continues to next block(s)
+ block = block[:comment_start]
+ self._comment_spans = True
+ break
+ return block
+
+ def _decode_bitonal(self):
+ """
+ This is a separate method because in the plain PBM format, all data tokens are
+ exactly one byte, so the inter-token whitespace is optional.
+ """
+ data = bytearray()
+ total_bytes = self.state.xsize * self.state.ysize
+
+ while len(data) != total_bytes:
+ block = self._read_block() # read next block
+ if not block:
+ # eof
+ break
+
+ block = self._ignore_comments(block)
+
+ tokens = b"".join(block.split())
+ for token in tokens:
+ if token not in (48, 49):
+ msg = b"Invalid token for this mode: %s" % bytes([token])
+ raise ValueError(msg)
+ data = (data + tokens)[:total_bytes]
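+ # plain PBM stores "1" for black and "0" for white; translate the
+ # ASCII digits to 8-bit luminance (0x00 black, 0xff white) for "1;8"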
+ invert = bytes.maketrans(b"01", b"\xFF\x00")
+ return data.translate(invert)
+
+ def _decode_blocks(self, maxval):
+ data = bytearray()
+ max_len = 10
+ out_byte_count = 4 if self.mode == "I" else 1
+ out_max = 65535 if self.mode == "I" else 255
+ bands = Image.getmodebands(self.mode)
+ total_bytes = self.state.xsize * self.state.ysize * bands * out_byte_count
+
+ half_token = False
+ while len(data) != total_bytes:
+ block = self._read_block() # read next block
+ if not block:
+ if half_token:
+ block = bytearray(b" ") # flush half_token
+ else:
+ # eof
+ break
+
+ block = self._ignore_comments(block)
+
+ if half_token:
+ block = half_token + block # stitch half_token to new block
+ half_token = False
+
+ tokens = block.split()
+
+ if block and not block[-1:].isspace(): # block might split token
+ half_token = tokens.pop() # save half token for later
+ if len(half_token) > max_len: # prevent buildup of half_token
+ msg = (
+ b"Token too long found in data: %s" % half_token[: max_len + 1]
+ )
+ raise ValueError(msg)
+
+ for token in tokens:
+ if len(token) > max_len:
+ msg = b"Token too long found in data: %s" % token[: max_len + 1]
+ raise ValueError(msg)
+ value = int(token)
+ if value > maxval:
+ msg = f"Channel value too large for this mode: {value}"
+ raise ValueError(msg)
+ value = round(value / maxval * out_max)
+ data += o32(value) if self.mode == "I" else o8(value)
+ if len(data) == total_bytes: # finished!
+ break
+ return data
+
+ def decode(self, buffer):
+ self._comment_spans = False
+ if self.mode == "1":
+ data = self._decode_bitonal()
+ rawmode = "1;8"
+ else:
+ maxval = self.args[-1]
+ data = self._decode_blocks(maxval)
+ rawmode = "I;32" if self.mode == "I" else self.mode
+ self.set_as_raw(bytes(data), rawmode)
+ return -1, 0
+
+
+class PpmDecoder(ImageFile.PyDecoder):
+ _pulls_fd = True
+
+ def decode(self, buffer):
+ data = bytearray()
+ maxval = self.args[-1]
+ in_byte_count = 1 if maxval < 256 else 2
+ out_byte_count = 4 if self.mode == "I" else 1
+ out_max = 65535 if self.mode == "I" else 255
+ bands = Image.getmodebands(self.mode)
+ while len(data) < self.state.xsize * self.state.ysize * bands * out_byte_count:
+ pixels = self.fd.read(in_byte_count * bands)
+ if len(pixels) < in_byte_count * bands:
+ # eof
+ break
+ for b in range(bands):
+ value = (
+ pixels[b] if in_byte_count == 1 else i16(pixels, b * in_byte_count)
+ )
+ value = min(out_max, round(value / maxval * out_max))
+ data += o32(value) if self.mode == "I" else o8(value)
+ rawmode = "I;32" if self.mode == "I" else self.mode
+ self.set_as_raw(bytes(data), rawmode)
+ return -1, 0
+
+
+#
+# --------------------------------------------------------------------
+
+
+def _save(im, fp, filename):
+ if im.mode == "1":
+ rawmode, head = "1;I", b"P4"
+ elif im.mode == "L":
+ rawmode, head = "L", b"P5"
+ elif im.mode == "I":
+ rawmode, head = "I;16B", b"P5"
+ elif im.mode in ("RGB", "RGBA"):
+ rawmode, head = "RGB", b"P6"
+ else:
+ msg = f"cannot write mode {im.mode} as PPM"
+ raise OSError(msg)
+ fp.write(head + b"\n%d %d\n" % im.size)
+ if head == b"P6":
+ fp.write(b"255\n")
+ elif head == b"P5":
+ if rawmode == "L":
+ fp.write(b"255\n")
+ else:
+ fp.write(b"65535\n")
+ ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))])
+
+
+#
+# --------------------------------------------------------------------
+
+
+Image.register_open(PpmImageFile.format, PpmImageFile, _accept)
+Image.register_save(PpmImageFile.format, _save)
+
+Image.register_decoder("ppm", PpmDecoder)
+Image.register_decoder("ppm_plain", PpmPlainDecoder)
+
+Image.register_extensions(PpmImageFile.format, [".pbm", ".pgm", ".ppm", ".pnm"])
+
+Image.register_mime(PpmImageFile.format, "image/x-portable-anymap")
diff --git a/Lib/site-packages/PIL/PsdImagePlugin.py b/Lib/site-packages/PIL/PsdImagePlugin.py
new file mode 100644
index 0000000..5cff564
--- /dev/null
+++ b/Lib/site-packages/PIL/PsdImagePlugin.py
@@ -0,0 +1,307 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# Adobe PSD 2.5/3.0 file handling
+#
+# History:
+# 1995-09-01 fl Created
+# 1997-01-03 fl Read most PSD images
+# 1997-01-18 fl Fixed P and CMYK support
+# 2001-10-21 fl Added seek/tell support (for layers)
+#
+# Copyright (c) 1997-2001 by Secret Labs AB.
+# Copyright (c) 1995-2001 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import io
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import i8
+from ._binary import i16be as i16
+from ._binary import i32be as i32
+from ._binary import si16be as si16
+
+MODES = {
+ # (photoshop mode, bits) -> (pil mode, required channels)
+ (0, 1): ("1", 1),
+ (0, 8): ("L", 1),
+ (1, 8): ("L", 1),
+ (2, 8): ("P", 1),
+ (3, 8): ("RGB", 3),
+ (4, 8): ("CMYK", 4),
+ (7, 8): ("L", 1), # FIXME: multilayer
+ (8, 8): ("L", 1), # duotone
+ (9, 8): ("LAB", 3),
+}
+
+
+# --------------------------------------------------------------------
+# read PSD images
+
+
+def _accept(prefix):
+ return prefix[:4] == b"8BPS"
+
+
+##
+# Image plugin for Photoshop images.
+
+
+class PsdImageFile(ImageFile.ImageFile):
+ format = "PSD"
+ format_description = "Adobe Photoshop"
+ _close_exclusive_fp_after_loading = False
+
+ def _open(self):
+ read = self.fp.read
+
+ #
+ # header
+
+ s = read(26)
+ if not _accept(s) or i16(s, 4) != 1:
+ msg = "not a PSD file"
+ raise SyntaxError(msg)
+
+ psd_bits = i16(s, 22)
+ psd_channels = i16(s, 12)
+ psd_mode = i16(s, 24)
+
+ mode, channels = MODES[(psd_mode, psd_bits)]
+
+ if channels > psd_channels:
+ msg = "not enough channels"
+ raise OSError(msg)
+ if mode == "RGB" and psd_channels == 4:
+ mode = "RGBA"
+ channels = 4
+
+ self._mode = mode
+ self._size = i32(s, 18), i32(s, 14)
+
+ #
+ # color mode data
+
+ size = i32(read(4))
+ if size:
+ data = read(size)
+ if mode == "P" and size == 768:
+ self.palette = ImagePalette.raw("RGB;L", data)
+
+ #
+ # image resources
+
+ self.resources = []
+
+ size = i32(read(4))
+ if size:
+ # load resources
+ end = self.fp.tell() + size
+ while self.fp.tell() < end:
+ read(4) # signature
+ id = i16(read(2))
+ name = read(i8(read(1)))
+ if not (len(name) & 1):
+ read(1) # padding
+ data = read(i32(read(4)))
+ if len(data) & 1:
+ read(1) # padding
+ self.resources.append((id, name, data))
+ if id == 1039: # ICC profile
+ self.info["icc_profile"] = data
+
+ #
+ # layer and mask information
+
+ self.layers = []
+
+ size = i32(read(4))
+ if size:
+ end = self.fp.tell() + size
+ size = i32(read(4))
+ if size:
+ _layer_data = io.BytesIO(ImageFile._safe_read(self.fp, size))
+ self.layers = _layerinfo(_layer_data, size)
+ self.fp.seek(end)
+ self.n_frames = len(self.layers)
+ self.is_animated = self.n_frames > 1
+
+ #
+ # image descriptor
+
+ self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels)
+
+ # keep the file open
+ self._fp = self.fp
+ self.frame = 1
+ self._min_frame = 1
+
+ def seek(self, layer):
+ if not self._seek_check(layer):
+ return
+
+ # seek to given layer (1..max)
+ try:
+ name, mode, bbox, tile = self.layers[layer - 1]
+ self._mode = mode
+ self.tile = tile
+ self.frame = layer
+ self.fp = self._fp
+ return name, bbox
+ except IndexError as e:
+ msg = "no such layer"
+ raise EOFError(msg) from e
+
+ def tell(self):
+ # return layer number (0=image, 1..max=layers)
+ return self.frame
+
+
+def _layerinfo(fp, ct_bytes):
+ # read layerinfo block
+ layers = []
+
+ def read(size):
+ return ImageFile._safe_read(fp, size)
+
+ ct = si16(read(2))
+
+ # sanity check
+ if ct_bytes < (abs(ct) * 20):
+ msg = "Layer block too short for number of layers requested"
+ raise SyntaxError(msg)
+
+ for _ in range(abs(ct)):
+ # bounding box
+ y0 = i32(read(4))
+ x0 = i32(read(4))
+ y1 = i32(read(4))
+ x1 = i32(read(4))
+
+ # image info
+ mode = []
+ ct_types = i16(read(2))
+ types = list(range(ct_types))
+ if len(types) > 4:
+ fp.seek(len(types) * 6 + 12, io.SEEK_CUR)
+ size = i32(read(4))
+ fp.seek(size, io.SEEK_CUR)
+ continue
+
+ for _ in types:
+ type = i16(read(2))
+
+ if type == 65535:
+ m = "A"
+ else:
+ m = "RGBA"[type]
+
+ mode.append(m)
+ read(4) # size
+
+ # figure out the image mode
+ mode.sort()
+ if mode == ["R"]:
+ mode = "L"
+ elif mode == ["B", "G", "R"]:
+ mode = "RGB"
+ elif mode == ["A", "B", "G", "R"]:
+ mode = "RGBA"
+ else:
+ mode = None # unknown
+
+ # skip over blend flags and extra information
+ read(12) # filler
+ name = ""
+ size = i32(read(4)) # length of the extra data field
+ if size:
+ data_end = fp.tell() + size
+
+ length = i32(read(4))
+ if length:
+ fp.seek(length - 16, io.SEEK_CUR)
+
+ length = i32(read(4))
+ if length:
+ fp.seek(length, io.SEEK_CUR)
+
+ length = i8(read(1))
+ if length:
+ # Don't know the proper encoding,
+ # Latin-1 should be a good guess
+ name = read(length).decode("latin-1", "replace")
+
+ fp.seek(data_end)
+ layers.append((name, mode, (x0, y0, x1, y1)))
+
+ # get tiles
+ for i, (name, mode, bbox) in enumerate(layers):
+ tile = []
+ for m in mode:
+ t = _maketile(fp, m, bbox, 1)
+ if t:
+ tile.extend(t)
+ layers[i] = name, mode, bbox, tile
+
+ return layers
+
+
+def _maketile(file, mode, bbox, channels):
+ tile = None
+ read = file.read
+
+ compression = i16(read(2))
+
+ xsize = bbox[2] - bbox[0]
+ ysize = bbox[3] - bbox[1]
+
+ offset = file.tell()
+
+ if compression == 0:
+ #
+ # raw compression
+ tile = []
+ for channel in range(channels):
+ layer = mode[channel]
+ if mode == "CMYK":
+ layer += ";I"
+ tile.append(("raw", bbox, offset, layer))
+ offset = offset + xsize * ysize
+
+ elif compression == 1:
+ #
+ # packbits compression
+ i = 0
+ tile = []
+ bytecount = read(channels * ysize * 2)
+ offset = file.tell()
+ for channel in range(channels):
+ layer = mode[channel]
+ if mode == "CMYK":
+ layer += ";I"
+ tile.append(("packbits", bbox, offset, layer))
+ for y in range(ysize):
+ offset = offset + i16(bytecount, i)
+ i += 2
+
+ file.seek(offset)
+
+ if offset & 1:
+ read(1) # padding
+
+ return tile
+
+
+# --------------------------------------------------------------------
+# registry
+
+
+Image.register_open(PsdImageFile.format, PsdImageFile, _accept)
+
+Image.register_extension(PsdImageFile.format, ".psd")
+
+Image.register_mime(PsdImageFile.format, "image/vnd.adobe.photoshop")
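+
+# Example (illustrative sketch): stepping through the layers of a PSD file.
+# "design.psd" is a hypothetical path; frame 0 is the composite image and
+# layers are numbered 1..n_frames.
+#
+#     from PIL import Image
+#
+#     im = Image.open("design.psd")
+#     for layer in range(1, im.n_frames + 1):
+#         im.seek(layer)  # returns (name, bbox) for this layer
+#         im.load()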
diff --git a/Lib/site-packages/PIL/PyAccess.py b/Lib/site-packages/PIL/PyAccess.py
new file mode 100644
index 0000000..07bb712
--- /dev/null
+++ b/Lib/site-packages/PIL/PyAccess.py
@@ -0,0 +1,364 @@
+#
+# The Python Imaging Library
+# Pillow fork
+#
+# Python implementation of the PixelAccess Object
+#
+# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved.
+# Copyright (c) 1995-2009 by Fredrik Lundh.
+# Copyright (c) 2013 Eric Soroos
+#
+# See the README file for information on usage and redistribution
+#
+
+# Notes:
+#
+# * Implements the pixel access object following Access.c
+# * Taking only the tuple form, which is used from python.
+# * Fill.c uses the integer form, but it's still going to use the old
+# Access.c implementation.
+#
+from __future__ import annotations
+
+import logging
+import sys
+
+from ._deprecate import deprecate
+
+try:
+ from cffi import FFI
+
+ defs = """
+ struct Pixel_RGBA {
+ unsigned char r,g,b,a;
+ };
+ struct Pixel_I16 {
+ unsigned char l,r;
+ };
+ """
+ ffi = FFI()
+ ffi.cdef(defs)
+except ImportError as ex:
+ # Allow error import for doc purposes, but error out when accessing
+ # anything in core.
+ from ._util import DeferredError
+
+ FFI = ffi = DeferredError.new(ex)
+
+logger = logging.getLogger(__name__)
+
+
+class PyAccess:
+ def __init__(self, img, readonly=False):
+ deprecate("PyAccess", 11)
+ vals = dict(img.im.unsafe_ptrs)
+ self.readonly = readonly
+ self.image8 = ffi.cast("unsigned char **", vals["image8"])
+ self.image32 = ffi.cast("int **", vals["image32"])
+ self.image = ffi.cast("unsigned char **", vals["image"])
+ self.xsize, self.ysize = img.im.size
+ self._img = img
+
+ # Keep pointer to im object to prevent dereferencing.
+ self._im = img.im
+ if self._im.mode in ("P", "PA"):
+ self._palette = img.palette
+
+ # Debugging is polluting test traces, only useful here
+ # when hacking on PyAccess
+ # logger.debug("%s", vals)
+ self._post_init()
+
+ def _post_init(self):
+ pass
+
+ def __setitem__(self, xy, color):
+ """
+ Modifies the pixel at x,y. The color is given as a single
+ numerical value for single band images, and a tuple for
+ multi-band images
+
+ :param xy: The pixel coordinate, given as (x, y). See
+ :ref:`coordinate-system`.
+ :param color: The pixel value.
+ """
+ if self.readonly:
+ msg = "Attempt to putpixel a read only image"
+ raise ValueError(msg)
+ (x, y) = xy
+ if x < 0:
+ x = self.xsize + x
+ if y < 0:
+ y = self.ysize + y
+ (x, y) = self.check_xy((x, y))
+
+ if (
+ self._im.mode in ("P", "PA")
+ and isinstance(color, (list, tuple))
+ and len(color) in [3, 4]
+ ):
+ # RGB or RGBA value for a P or PA image
+ if self._im.mode == "PA":
+ alpha = color[3] if len(color) == 4 else 255
+ color = color[:3]
+ color = self._palette.getcolor(color, self._img)
+ if self._im.mode == "PA":
+ color = (color, alpha)
+
+ return self.set_pixel(x, y, color)
+
+ def __getitem__(self, xy):
+ """
+ Returns the pixel at x,y. The pixel is returned as a single
+ value for single band images or a tuple for multiple band
+ images
+
+ :param xy: The pixel coordinate, given as (x, y). See
+ :ref:`coordinate-system`.
+ :returns: a pixel value for single band images, a tuple of
+ pixel values for multiband images.
+ """
+ (x, y) = xy
+ if x < 0:
+ x = self.xsize + x
+ if y < 0:
+ y = self.ysize + y
+ (x, y) = self.check_xy((x, y))
+ return self.get_pixel(x, y)
+
+ putpixel = __setitem__
+ getpixel = __getitem__
+
+ def check_xy(self, xy):
+ (x, y) = xy
+ if not (0 <= x < self.xsize and 0 <= y < self.ysize):
+ msg = "pixel location out of range"
+ raise ValueError(msg)
+ return xy
+
+
+class _PyAccess32_2(PyAccess):
+ """PA, LA, stored in first and last bytes of a 32 bit word"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32)
+
+ def get_pixel(self, x, y):
+ pixel = self.pixels[y][x]
+ return pixel.r, pixel.a
+
+ def set_pixel(self, x, y, color):
+ pixel = self.pixels[y][x]
+ # tuple
+ pixel.r = min(color[0], 255)
+ pixel.a = min(color[1], 255)
+
+
+class _PyAccess32_3(PyAccess):
+ """RGB and friends, stored in the first three bytes of a 32 bit word"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32)
+
+ def get_pixel(self, x, y):
+ pixel = self.pixels[y][x]
+ return pixel.r, pixel.g, pixel.b
+
+ def set_pixel(self, x, y, color):
+ pixel = self.pixels[y][x]
+ # tuple
+ pixel.r = min(color[0], 255)
+ pixel.g = min(color[1], 255)
+ pixel.b = min(color[2], 255)
+ pixel.a = 255
+
+
+class _PyAccess32_4(PyAccess):
+ """RGBA etc, all 4 bytes of a 32 bit word"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32)
+
+ def get_pixel(self, x, y):
+ pixel = self.pixels[y][x]
+ return pixel.r, pixel.g, pixel.b, pixel.a
+
+ def set_pixel(self, x, y, color):
+ pixel = self.pixels[y][x]
+ # tuple
+ pixel.r = min(color[0], 255)
+ pixel.g = min(color[1], 255)
+ pixel.b = min(color[2], 255)
+ pixel.a = min(color[3], 255)
+
+
+class _PyAccess8(PyAccess):
+ """1, L, P, 8 bit images stored as uint8"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = self.image8
+
+ def get_pixel(self, x, y):
+ return self.pixels[y][x]
+
+ def set_pixel(self, x, y, color):
+ try:
+ # integer
+ self.pixels[y][x] = min(color, 255)
+ except TypeError:
+ # tuple
+ self.pixels[y][x] = min(color[0], 255)
+
+
+class _PyAccessI16_N(PyAccess):
+ """I;16 access, native bitendian without conversion"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("unsigned short **", self.image)
+
+ def get_pixel(self, x, y):
+ return self.pixels[y][x]
+
+ def set_pixel(self, x, y, color):
+ try:
+ # integer
+ self.pixels[y][x] = min(color, 65535)
+ except TypeError:
+ # tuple
+ self.pixels[y][x] = min(color[0], 65535)
+
+
+class _PyAccessI16_L(PyAccess):
+ """I;16L access, with conversion"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("struct Pixel_I16 **", self.image)
+
+ def get_pixel(self, x, y):
+ pixel = self.pixels[y][x]
+ return pixel.l + pixel.r * 256
+
+ def set_pixel(self, x, y, color):
+ pixel = self.pixels[y][x]
+ try:
+ color = min(color, 65535)
+ except TypeError:
+ color = min(color[0], 65535)
+
+ pixel.l = color & 0xFF
+ pixel.r = color >> 8
+
+
+class _PyAccessI16_B(PyAccess):
+ """I;16B access, with conversion"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("struct Pixel_I16 **", self.image)
+
+ def get_pixel(self, x, y):
+ pixel = self.pixels[y][x]
+ return pixel.l * 256 + pixel.r
+
+ def set_pixel(self, x, y, color):
+ pixel = self.pixels[y][x]
+ try:
+ color = min(color, 65535)
+ except Exception:
+ color = min(color[0], 65535)
+
+ pixel.l = color >> 8
+ pixel.r = color & 0xFF
+
+
+class _PyAccessI32_N(PyAccess):
+ """Signed Int32 access, native endian"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = self.image32
+
+ def get_pixel(self, x, y):
+ return self.pixels[y][x]
+
+ def set_pixel(self, x, y, color):
+ self.pixels[y][x] = color
+
+
+class _PyAccessI32_Swap(PyAccess):
+ """I;32L/B access, with byteswapping conversion"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = self.image32
+
+ def reverse(self, i):
+ orig = ffi.new("int *", i)
+ chars = ffi.cast("unsigned char *", orig)
+ chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], chars[1], chars[0]
+ return ffi.cast("int *", chars)[0]
+
+ def get_pixel(self, x, y):
+ return self.reverse(self.pixels[y][x])
+
+ def set_pixel(self, x, y, color):
+ self.pixels[y][x] = self.reverse(color)
+
+
+class _PyAccessF(PyAccess):
+ """32 bit float access"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("float **", self.image32)
+
+ def get_pixel(self, x, y):
+ return self.pixels[y][x]
+
+ def set_pixel(self, x, y, color):
+ try:
+ # not a tuple
+ self.pixels[y][x] = color
+ except TypeError:
+ # tuple
+ self.pixels[y][x] = color[0]
+
+
+mode_map = {
+ "1": _PyAccess8,
+ "L": _PyAccess8,
+ "P": _PyAccess8,
+ "I;16N": _PyAccessI16_N,
+ "LA": _PyAccess32_2,
+ "La": _PyAccess32_2,
+ "PA": _PyAccess32_2,
+ "RGB": _PyAccess32_3,
+ "LAB": _PyAccess32_3,
+ "HSV": _PyAccess32_3,
+ "YCbCr": _PyAccess32_3,
+ "RGBA": _PyAccess32_4,
+ "RGBa": _PyAccess32_4,
+ "RGBX": _PyAccess32_4,
+ "CMYK": _PyAccess32_4,
+ "F": _PyAccessF,
+ "I": _PyAccessI32_N,
+}
+
+if sys.byteorder == "little":
+ mode_map["I;16"] = _PyAccessI16_N
+ mode_map["I;16L"] = _PyAccessI16_N
+ mode_map["I;16B"] = _PyAccessI16_B
+
+ mode_map["I;32L"] = _PyAccessI32_N
+ mode_map["I;32B"] = _PyAccessI32_Swap
+else:
+ mode_map["I;16"] = _PyAccessI16_L
+ mode_map["I;16L"] = _PyAccessI16_L
+ mode_map["I;16B"] = _PyAccessI16_N
+
+ mode_map["I;32L"] = _PyAccessI32_Swap
+ mode_map["I;32B"] = _PyAccessI32_N
+
+
+def new(img, readonly=False):
+ access_type = mode_map.get(img.mode, None)
+ if not access_type:
+ logger.debug("PyAccess Not Implemented: %s", img.mode)
+ return None
+ return access_type(img, readonly)
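+
+# Example (illustrative sketch): PyAccess objects are normally obtained via
+# Image.load() on cffi-based builds (e.g. PyPy); direct use is deprecated.
+#
+#     from PIL import Image
+#
+#     im = Image.new("RGB", (4, 4))
+#     px = im.load()  # may be a PyAccess instance on such builds
+#     px[0, 0] = (255, 0, 0)
+#     print(px[0, 0])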
diff --git a/Lib/site-packages/PIL/QoiImagePlugin.py b/Lib/site-packages/PIL/QoiImagePlugin.py
new file mode 100644
index 0000000..a7b9d4a
--- /dev/null
+++ b/Lib/site-packages/PIL/QoiImagePlugin.py
@@ -0,0 +1,106 @@
+#
+# The Python Imaging Library.
+#
+# QOI support for PIL
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import os
+
+from . import Image, ImageFile
+from ._binary import i32be as i32
+from ._binary import o8
+
+
+def _accept(prefix):
+ return prefix[:4] == b"qoif"
+
+
+class QoiImageFile(ImageFile.ImageFile):
+ format = "QOI"
+ format_description = "Quite OK Image"
+
+ def _open(self):
+ if not _accept(self.fp.read(4)):
+ msg = "not a QOI file"
+ raise SyntaxError(msg)
+
+ self._size = tuple(i32(self.fp.read(4)) for i in range(2))
+
+ channels = self.fp.read(1)[0]
+ self._mode = "RGB" if channels == 3 else "RGBA"
+
+ self.fp.seek(1, os.SEEK_CUR) # colorspace
+ self.tile = [("qoi", (0, 0) + self._size, self.fp.tell(), None)]
+
+
+class QoiDecoder(ImageFile.PyDecoder):
+ _pulls_fd = True
+
+ def _add_to_previous_pixels(self, value):
+ self._previous_pixel = value
+
+ r, g, b, a = value
+ hash_value = (r * 3 + g * 5 + b * 7 + a * 11) % 64
+ self._previously_seen_pixels[hash_value] = value
+
+ def decode(self, buffer):
+ self._previously_seen_pixels = {}
+ self._previous_pixel = None
+ self._add_to_previous_pixels(b"".join(o8(i) for i in (0, 0, 0, 255)))
+
+ data = bytearray()
+ bands = Image.getmodebands(self.mode)
+ while len(data) < self.state.xsize * self.state.ysize * bands:
+ byte = self.fd.read(1)[0]
+ if byte == 0b11111110: # QOI_OP_RGB
+ value = self.fd.read(3) + self._previous_pixel[3:]
+ elif byte == 0b11111111: # QOI_OP_RGBA
+ value = self.fd.read(4)
+ else:
+ op = byte >> 6
+ if op == 0: # QOI_OP_INDEX
+ op_index = byte & 0b00111111
+ value = self._previously_seen_pixels.get(op_index, (0, 0, 0, 0))
+ elif op == 1: # QOI_OP_DIFF
+ value = (
+ (self._previous_pixel[0] + ((byte & 0b00110000) >> 4) - 2)
+ % 256,
+ (self._previous_pixel[1] + ((byte & 0b00001100) >> 2) - 2)
+ % 256,
+ (self._previous_pixel[2] + (byte & 0b00000011) - 2) % 256,
+ )
+ value += (self._previous_pixel[3],)
+ elif op == 2: # QOI_OP_LUMA
+ second_byte = self.fd.read(1)[0]
+ diff_green = (byte & 0b00111111) - 32
+ diff_red = ((second_byte & 0b11110000) >> 4) - 8
+ diff_blue = (second_byte & 0b00001111) - 8
+
+ value = tuple(
+ (self._previous_pixel[i] + diff_green + diff) % 256
+ for i, diff in enumerate((diff_red, 0, diff_blue))
+ )
+ value += (self._previous_pixel[3],)
+ elif op == 3: # QOI_OP_RUN
+ run_length = (byte & 0b00111111) + 1
+ value = self._previous_pixel
+ if bands == 3:
+ value = value[:3]
+ data += value * run_length
+ continue
+ value = b"".join(o8(i) for i in value)
+ self._add_to_previous_pixels(value)
+
+ if bands == 3:
+ value = value[:3]
+ data += value
+ self.set_as_raw(bytes(data))
+ return -1, 0
+
+
+Image.register_open(QoiImageFile.format, QoiImageFile, _accept)
+Image.register_decoder("qoi", QoiDecoder)
+Image.register_extension(QoiImageFile.format, ".qoi")
diff --git a/Lib/site-packages/PIL/SgiImagePlugin.py b/Lib/site-packages/PIL/SgiImagePlugin.py
new file mode 100644
index 0000000..f9a10f6
--- /dev/null
+++ b/Lib/site-packages/PIL/SgiImagePlugin.py
@@ -0,0 +1,231 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# SGI image file handling
+#
+# See "The SGI Image File Format (Draft version 0.97)", Paul Haeberli.
+#
+#
+#
+# History:
+# 2017-22-07 mb Add RLE decompression
+# 2016-16-10 mb Add save method without compression
+# 1995-09-10 fl Created
+#
+# Copyright (c) 2016 by Mickael Bonfill.
+# Copyright (c) 2008 by Karsten Hiddemann.
+# Copyright (c) 1997 by Secret Labs AB.
+# Copyright (c) 1995 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import os
+import struct
+
+from . import Image, ImageFile
+from ._binary import i16be as i16
+from ._binary import o8
+
+
+def _accept(prefix):
+ return len(prefix) >= 2 and i16(prefix) == 474
+
+
+MODES = {
+ (1, 1, 1): "L",
+ (1, 2, 1): "L",
+ (2, 1, 1): "L;16B",
+ (2, 2, 1): "L;16B",
+ (1, 3, 3): "RGB",
+ (2, 3, 3): "RGB;16B",
+ (1, 3, 4): "RGBA",
+ (2, 3, 4): "RGBA;16B",
+}
+
+
+##
+# Image plugin for SGI images.
+class SgiImageFile(ImageFile.ImageFile):
+ format = "SGI"
+ format_description = "SGI Image File Format"
+
+ def _open(self):
+ # HEAD
+ headlen = 512
+ s = self.fp.read(headlen)
+
+ if not _accept(s):
+ msg = "Not an SGI image file"
+ raise ValueError(msg)
+
+ # compression : verbatim or RLE
+ compression = s[2]
+
+ # bpc : 1 or 2 bytes (8bits or 16bits)
+ bpc = s[3]
+
+ # dimension : 1, 2 or 3 (depending on xsize, ysize and zsize)
+ dimension = i16(s, 4)
+
+ # xsize : width
+ xsize = i16(s, 6)
+
+ # ysize : height
+ ysize = i16(s, 8)
+
+ # zsize : channels count
+ zsize = i16(s, 10)
+
+ # layout
+ layout = bpc, dimension, zsize
+
+ # determine mode from bits/zsize
+ rawmode = ""
+ try:
+ rawmode = MODES[layout]
+ except KeyError:
+ pass
+
+ if rawmode == "":
+ msg = "Unsupported SGI image mode"
+ raise ValueError(msg)
+
+ self._size = xsize, ysize
+ self._mode = rawmode.split(";")[0]
+ if self.mode == "RGB":
+ self.custom_mimetype = "image/rgb"
+
+ # orientation -1 : scanlines begins at the bottom-left corner
+ orientation = -1
+
+ # decoder info
+ if compression == 0:
+ pagesize = xsize * ysize * bpc
+ if bpc == 2:
+ self.tile = [
+ ("SGI16", (0, 0) + self.size, headlen, (self.mode, 0, orientation))
+ ]
+ else:
+ self.tile = []
+ offset = headlen
+ for layer in self.mode:
+ self.tile.append(
+ ("raw", (0, 0) + self.size, offset, (layer, 0, orientation))
+ )
+ offset += pagesize
+ elif compression == 1:
+ self.tile = [
+ ("sgi_rle", (0, 0) + self.size, headlen, (rawmode, orientation, bpc))
+ ]
+
+
+def _save(im, fp, filename):
+ if im.mode not in {"RGB", "RGBA", "L"}:
+ msg = "Unsupported SGI image mode"
+ raise ValueError(msg)
+
+ # Get the keyword arguments
+ info = im.encoderinfo
+
+ # Byte-per-pixel precision, 1 = 8bits per pixel
+ bpc = info.get("bpc", 1)
+
+ if bpc not in (1, 2):
+ msg = "Unsupported number of bytes per pixel"
+ raise ValueError(msg)
+
+ # Flip the image, since the origin of SGI file is the bottom-left corner
+ orientation = -1
+ # Define the file as SGI File Format
+ magic_number = 474
+ # Run-Length Encoding Compression - Unsupported at this time
+ rle = 0
+
+ # Number of dimensions (x,y,z)
+ dim = 3
+ # X Dimension = width / Y Dimension = height
+ x, y = im.size
+ if im.mode == "L" and y == 1:
+ dim = 1
+ elif im.mode == "L":
+ dim = 2
+ # Z Dimension: Number of channels
+ z = len(im.mode)
+
+ if dim in {1, 2}:
+ z = 1
+
+ # assert we've got the right number of bands.
+ if len(im.getbands()) != z:
+ msg = f"incorrect number of bands in SGI write: {z} vs {len(im.getbands())}"
+ raise ValueError(msg)
+
+ # Minimum Byte value
+ pinmin = 0
+ # Maximum Byte value (255 = 8bits per pixel)
+ pinmax = 255
+ # Image name (79 characters max, truncated below in write)
+ img_name = os.path.splitext(os.path.basename(filename))[0]
+ img_name = img_name.encode("ascii", "ignore")
+ # Standard representation of pixel in the file
+ colormap = 0
+ fp.write(struct.pack(">h", magic_number))
+ fp.write(o8(rle))
+ fp.write(o8(bpc))
+ fp.write(struct.pack(">H", dim))
+ fp.write(struct.pack(">H", x))
+ fp.write(struct.pack(">H", y))
+ fp.write(struct.pack(">H", z))
+ fp.write(struct.pack(">l", pinmin))
+ fp.write(struct.pack(">l", pinmax))
+ fp.write(struct.pack("4s", b"")) # dummy
+ fp.write(struct.pack("79s", img_name)) # truncates to 79 chars
+ fp.write(struct.pack("s", b"")) # force null byte after img_name
+ fp.write(struct.pack(">l", colormap))
+ fp.write(struct.pack("404s", b"")) # dummy
+
+ rawmode = "L"
+ if bpc == 2:
+ rawmode = "L;16B"
+
+ for channel in im.split():
+ fp.write(channel.tobytes("raw", rawmode, 0, orientation))
+
+ if hasattr(fp, "flush"):
+ fp.flush()
+
+
+class SGI16Decoder(ImageFile.PyDecoder):
+ _pulls_fd = True
+
+ def decode(self, buffer):
+ rawmode, stride, orientation = self.args
+ pagesize = self.state.xsize * self.state.ysize
+ zsize = len(self.mode)
+ self.fd.seek(512)
+
+ for band in range(zsize):
+ channel = Image.new("L", (self.state.xsize, self.state.ysize))
+ channel.frombytes(
+ self.fd.read(2 * pagesize), "raw", "L;16B", stride, orientation
+ )
+ self.im.putband(channel.im, band)
+
+ return -1, 0
+
+
+#
+# registry
+
+
+Image.register_decoder("SGI16", SGI16Decoder)
+Image.register_open(SgiImageFile.format, SgiImageFile, _accept)
+Image.register_save(SgiImageFile.format, _save)
+Image.register_mime(SgiImageFile.format, "image/sgi")
+
+Image.register_extensions(SgiImageFile.format, [".bw", ".rgb", ".rgba", ".sgi"])
+
+# End of file
diff --git a/Lib/site-packages/PIL/SpiderImagePlugin.py b/Lib/site-packages/PIL/SpiderImagePlugin.py
new file mode 100644
index 0000000..86582fb
--- /dev/null
+++ b/Lib/site-packages/PIL/SpiderImagePlugin.py
@@ -0,0 +1,318 @@
+#
+# The Python Imaging Library.
+#
+# SPIDER image file handling
+#
+# History:
+# 2004-08-02 Created BB
+# 2006-03-02 added save method
+# 2006-03-13 added support for stack images
+#
+# Copyright (c) 2004 by Health Research Inc. (HRI) RENSSELAER, NY 12144.
+# Copyright (c) 2004 by William Baxter.
+# Copyright (c) 2004 by Secret Labs AB.
+# Copyright (c) 2004 by Fredrik Lundh.
+#
+
+##
+# Image plugin for the Spider image format. This format is used
+# by the SPIDER software, in processing image data from electron
+# microscopy and tomography.
+##
+
+#
+# SpiderImagePlugin.py
+#
+# The Spider image format is used by SPIDER software, in processing
+# image data from electron microscopy and tomography.
+#
+# Spider home page:
+# https://spider.wadsworth.org/spider_doc/spider/docs/spider.html
+#
+# Details about the Spider image format:
+# https://spider.wadsworth.org/spider_doc/spider/docs/image_doc.html
+#
+from __future__ import annotations
+
+import os
+import struct
+import sys
+
+from . import Image, ImageFile
+
+
+def isInt(f):
+ try:
+ i = int(f)
+ if f - i == 0:
+ return 1
+ else:
+ return 0
+ except (ValueError, OverflowError):
+ return 0
+
+
+iforms = [1, 3, -11, -12, -21, -22]
+
+
+# There is no magic number to identify Spider files, so just check a
+# series of header locations to see if they have reasonable values.
+# Returns no. of bytes in the header, if it is a valid Spider header,
+# otherwise returns 0
+
+
+def isSpiderHeader(t):
+ h = (99,) + t # add 1 value so can use spider header index start=1
+ # header values 1,2,5,12,13,22,23 should be integers
+ for i in [1, 2, 5, 12, 13, 22, 23]:
+ if not isInt(h[i]):
+ return 0
+ # check iform
+ iform = int(h[5])
+ if iform not in iforms:
+ return 0
+ # check other header values
+ labrec = int(h[13]) # no. records in file header
+ labbyt = int(h[22]) # total no. of bytes in header
+ lenbyt = int(h[23]) # record length in bytes
+ if labbyt != (labrec * lenbyt):
+ return 0
+ # looks like a valid header
+ return labbyt
+
+
+def isSpiderImage(filename):
+ with open(filename, "rb") as fp:
+ f = fp.read(92) # read 23 * 4 bytes
+ t = struct.unpack(">23f", f) # try big-endian first
+ hdrlen = isSpiderHeader(t)
+ if hdrlen == 0:
+ t = struct.unpack("<23f", f) # little-endian
+ hdrlen = isSpiderHeader(t)
+ return hdrlen
+
+
+class SpiderImageFile(ImageFile.ImageFile):
+ format = "SPIDER"
+ format_description = "Spider 2D image"
+ _close_exclusive_fp_after_loading = False
+
+ def _open(self):
+ # check header
+ n = 27 * 4 # read 27 float values
+ f = self.fp.read(n)
+
+ try:
+ self.bigendian = 1
+ t = struct.unpack(">27f", f) # try big-endian first
+ hdrlen = isSpiderHeader(t)
+ if hdrlen == 0:
+ self.bigendian = 0
+ t = struct.unpack("<27f", f) # little-endian
+ hdrlen = isSpiderHeader(t)
+ if hdrlen == 0:
+ msg = "not a valid Spider file"
+ raise SyntaxError(msg)
+ except struct.error as e:
+ msg = "not a valid Spider file"
+ raise SyntaxError(msg) from e
+
+ h = (99,) + t # add 1 value : spider header index starts at 1
+ iform = int(h[5])
+ if iform != 1:
+ msg = "not a Spider 2D image"
+ raise SyntaxError(msg)
+
+ self._size = int(h[12]), int(h[2]) # size in pixels (width, height)
+ self.istack = int(h[24])
+ self.imgnumber = int(h[27])
+
+ if self.istack == 0 and self.imgnumber == 0:
+ # stk=0, img=0: a regular 2D image
+ offset = hdrlen
+ self._nimages = 1
+ elif self.istack > 0 and self.imgnumber == 0:
+ # stk>0, img=0: Opening the stack for the first time
+ self.imgbytes = int(h[12]) * int(h[2]) * 4
+ self.hdrlen = hdrlen
+ self._nimages = int(h[26])
+ # Point to the first image in the stack
+ offset = hdrlen * 2
+ self.imgnumber = 1
+ elif self.istack == 0 and self.imgnumber > 0:
+ # stk=0, img>0: an image within the stack
+ offset = hdrlen + self.stkoffset
+ self.istack = 2 # So Image knows it's still a stack
+ else:
+ msg = "inconsistent stack header values"
+ raise SyntaxError(msg)
+
+ if self.bigendian:
+ self.rawmode = "F;32BF"
+ else:
+ self.rawmode = "F;32F"
+ self._mode = "F"
+
+ self.tile = [("raw", (0, 0) + self.size, offset, (self.rawmode, 0, 1))]
+ self._fp = self.fp # FIXME: hack
+
+ @property
+ def n_frames(self):
+ return self._nimages
+
+ @property
+ def is_animated(self):
+ return self._nimages > 1
+
+ # 1st image index is zero (although SPIDER imgnumber starts at 1)
+ def tell(self):
+ if self.imgnumber < 1:
+ return 0
+ else:
+ return self.imgnumber - 1
+
+ def seek(self, frame):
+ if self.istack == 0:
+ msg = "attempt to seek in a non-stack file"
+ raise EOFError(msg)
+ if not self._seek_check(frame):
+ return
+ self.stkoffset = self.hdrlen + frame * (self.hdrlen + self.imgbytes)
+ self.fp = self._fp
+ self.fp.seek(self.stkoffset)
+ self._open()
+
+ # returns a byte image after rescaling to 0..255
+ def convert2byte(self, depth=255):
+ (minimum, maximum) = self.getextrema()
+ m = 1
+ if maximum != minimum:
+ m = depth / (maximum - minimum)
+ b = -m * minimum
+ return self.point(lambda i, m=m, b=b: i * m + b).convert("L")
+
+ # returns a ImageTk.PhotoImage object, after rescaling to 0..255
+ def tkPhotoImage(self):
+ from . import ImageTk
+
+ return ImageTk.PhotoImage(self.convert2byte(), palette=256)
+
+
+# --------------------------------------------------------------------
+# Image series
+
+
+# given a list of filenames, return a list of images
+def loadImageSeries(filelist=None):
+ """create a list of :py:class:`~PIL.Image.Image` objects for use in a montage"""
+ if filelist is None or len(filelist) < 1:
+ return
+
+ imglist = []
+ for img in filelist:
+ if not os.path.exists(img):
+ print(f"unable to find {img}")
+ continue
+ try:
+ with Image.open(img) as im:
+ im = im.convert2byte()
+ except Exception:
+ if not isSpiderImage(img):
+ print(img + " is not a Spider image file")
+ continue
+ im.info["filename"] = img
+ imglist.append(im)
+ return imglist
+
+
+# --------------------------------------------------------------------
+# For saving images in Spider format
+
+
+def makeSpiderHeader(im):
+ nsam, nrow = im.size
+ lenbyt = nsam * 4 # There are labrec records in the header
+ labrec = int(1024 / lenbyt)
+ if 1024 % lenbyt != 0:
+ labrec += 1
+ labbyt = labrec * lenbyt
+ nvalues = int(labbyt / 4)
+ if nvalues < 23:
+ return []
+
+ hdr = [0.0] * nvalues
+
+ # NB these are Fortran indices
+ hdr[1] = 1.0 # nslice (=1 for an image)
+ hdr[2] = float(nrow) # number of rows per slice
+ hdr[3] = float(nrow) # number of records in the image
+ hdr[5] = 1.0 # iform for 2D image
+ hdr[12] = float(nsam) # number of pixels per line
+ hdr[13] = float(labrec) # number of records in file header
+ hdr[22] = float(labbyt) # total number of bytes in header
+ hdr[23] = float(lenbyt) # record length in bytes
+
+ # adjust for Fortran indexing
+ hdr = hdr[1:]
+ hdr.append(0.0)
+ # pack binary data into a string
+ return [struct.pack("f", v) for v in hdr]
+
+
+def _save(im, fp, filename):
+ if im.mode[0] != "F":
+ im = im.convert("F")
+
+ hdr = makeSpiderHeader(im)
+ if len(hdr) < 256:
+ msg = "Error creating Spider header"
+ raise OSError(msg)
+
+ # write the SPIDER header
+ fp.writelines(hdr)
+
+ rawmode = "F;32NF" # 32-bit native floating point
+ ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))])
+
+
+def _save_spider(im, fp, filename):
+ # get the filename extension and register it with Image
+ ext = os.path.splitext(filename)[1]
+ Image.register_extension(SpiderImageFile.format, ext)
+ _save(im, fp, filename)
+
+
+# --------------------------------------------------------------------
+
+
+Image.register_open(SpiderImageFile.format, SpiderImageFile)
+Image.register_save(SpiderImageFile.format, _save_spider)
+
+if __name__ == "__main__":
+ if len(sys.argv) < 2:
+ print("Syntax: python3 SpiderImagePlugin.py [infile] [outfile]")
+ sys.exit()
+
+ filename = sys.argv[1]
+ if not isSpiderImage(filename):
+ print("input image must be in Spider format")
+ sys.exit()
+
+ with Image.open(filename) as im:
+ print("image: " + str(im))
+ print("format: " + str(im.format))
+ print("size: " + str(im.size))
+ print("mode: " + str(im.mode))
+ print("max, min: ", end=" ")
+ print(im.getextrema())
+
+ if len(sys.argv) > 2:
+ outfile = sys.argv[2]
+
+ # perform some image operation
+ im = im.transpose(Image.Transpose.FLIP_LEFT_RIGHT)
+ print(
+ f"saving a flipped version of {os.path.basename(filename)} "
+ f"as {outfile} "
+ )
+ im.save(outfile, SpiderImageFile.format)
diff --git a/Lib/site-packages/PIL/SunImagePlugin.py b/Lib/site-packages/PIL/SunImagePlugin.py
new file mode 100644
index 0000000..11ce3df
--- /dev/null
+++ b/Lib/site-packages/PIL/SunImagePlugin.py
@@ -0,0 +1,139 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# Sun image file handling
+#
+# History:
+# 1995-09-10 fl Created
+# 1996-05-28 fl Fixed 32-bit alignment
+# 1998-12-29 fl Import ImagePalette module
+# 2001-12-18 fl Fixed palette loading (from Jean-Claude Rimbault)
+#
+# Copyright (c) 1997-2001 by Secret Labs AB
+# Copyright (c) 1995-1996 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import i32be as i32
+
+
+def _accept(prefix):
+ return len(prefix) >= 4 and i32(prefix) == 0x59A66A95
+
+
+##
+# Image plugin for Sun raster files.
+
+
+class SunImageFile(ImageFile.ImageFile):
+ format = "SUN"
+ format_description = "Sun Raster File"
+
+ def _open(self):
+ # The Sun Raster file header is 32 bytes in length
+ # and has the following format:
+
+ # typedef struct _SunRaster
+ # {
+ # DWORD MagicNumber; /* Magic (identification) number */
+ # DWORD Width; /* Width of image in pixels */
+ # DWORD Height; /* Height of image in pixels */
+ # DWORD Depth; /* Number of bits per pixel */
+ # DWORD Length; /* Size of image data in bytes */
+ # DWORD Type; /* Type of raster file */
+ # DWORD ColorMapType; /* Type of color map */
+ # DWORD ColorMapLength; /* Size of the color map in bytes */
+ # } SUNRASTER;
+
+ # HEAD
+ s = self.fp.read(32)
+ if not _accept(s):
+ msg = "not an SUN raster file"
+ raise SyntaxError(msg)
+
+ offset = 32
+
+ self._size = i32(s, 4), i32(s, 8)
+
+ depth = i32(s, 12)
+ # data_length = i32(s, 16) # unreliable, ignore.
+ file_type = i32(s, 20)
+ palette_type = i32(s, 24) # 0: None, 1: RGB, 2: Raw/arbitrary
+ palette_length = i32(s, 28)
+
+ if depth == 1:
+ self._mode, rawmode = "1", "1;I"
+ elif depth == 4:
+ self._mode, rawmode = "L", "L;4"
+ elif depth == 8:
+ self._mode = rawmode = "L"
+ elif depth == 24:
+ if file_type == 3:
+ self._mode, rawmode = "RGB", "RGB"
+ else:
+ self._mode, rawmode = "RGB", "BGR"
+ elif depth == 32:
+ if file_type == 3:
+ self._mode, rawmode = "RGB", "RGBX"
+ else:
+ self._mode, rawmode = "RGB", "BGRX"
+ else:
+ msg = "Unsupported Mode/Bit Depth"
+ raise SyntaxError(msg)
+
+ if palette_length:
+ if palette_length > 1024:
+ msg = "Unsupported Color Palette Length"
+ raise SyntaxError(msg)
+
+ if palette_type != 1:
+ msg = "Unsupported Palette Type"
+ raise SyntaxError(msg)
+
+ offset = offset + palette_length
+ self.palette = ImagePalette.raw("RGB;L", self.fp.read(palette_length))
+ if self.mode == "L":
+ self._mode = "P"
+ rawmode = rawmode.replace("L", "P")
+
+ # 16 bit boundaries on stride
+ stride = ((self.size[0] * depth + 15) // 16) * 2
+
+ # file type: Type is the version (or flavor) of the bitmap
+ # file. The following values are typically found in the Type
+ # field:
+ # 0000h Old
+ # 0001h Standard
+ # 0002h Byte-encoded
+ # 0003h RGB format
+ # 0004h TIFF format
+ # 0005h IFF format
+ # FFFFh Experimental
+
+ # Old and standard are the same, except for the length tag.
+ # byte-encoded is run-length-encoded
+ # RGB looks similar to standard, but RGB byte order
+ # TIFF and IFF mean that they were converted from T/IFF
+ # Experimental means that it's something else.
+ # (https://www.fileformat.info/format/sunraster/egff.htm)
+
+ if file_type in (0, 1, 3, 4, 5):
+ self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride))]
+ elif file_type == 2:
+ self.tile = [("sun_rle", (0, 0) + self.size, offset, rawmode)]
+ else:
+ msg = "Unsupported Sun Raster file type"
+ raise SyntaxError(msg)
+
+
+#
+# registry
+
+
+Image.register_open(SunImageFile.format, SunImageFile, _accept)
+
+Image.register_extension(SunImageFile.format, ".ras")
diff --git a/Lib/site-packages/PIL/TarIO.py b/Lib/site-packages/PIL/TarIO.py
new file mode 100644
index 0000000..7470663
--- /dev/null
+++ b/Lib/site-packages/PIL/TarIO.py
@@ -0,0 +1,73 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# read files from within a tar file
+#
+# History:
+# 95-06-18 fl Created
+# 96-05-28 fl Open files in binary mode
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1995-96.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import io
+from types import TracebackType
+
+from . import ContainerIO
+
+
+class TarIO(ContainerIO.ContainerIO[bytes]):
+ """A file object that provides read access to a given member of a TAR file."""
+
+ def __init__(self, tarfile: str, file: str) -> None:
+ """
+ Create file object.
+
+ :param tarfile: Name of TAR file.
+ :param file: Name of member file.
+ """
+ self.fh = open(tarfile, "rb")
+
+ while True:
+ s = self.fh.read(512)
+ if len(s) != 512:
+ msg = "unexpected end of tar file"
+ raise OSError(msg)
+
+ name = s[:100].decode("utf-8")
+ i = name.find("\0")
+ if i == 0:
+ msg = "cannot find subfile"
+ raise OSError(msg)
+ if i > 0:
+ name = name[:i]
+
+ size = int(s[124:135], 8)
+
+ if file == name:
+ break
+
+ self.fh.seek((size + 511) & (~511), io.SEEK_CUR)
+
+ # Open region
+ super().__init__(self.fh, self.fh.tell(), size)
+
+ # Context manager support
+ def __enter__(self) -> TarIO:
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> None:
+ self.close()
+
+ def close(self) -> None:
+ self.fh.close()
diff --git a/Lib/site-packages/PIL/TgaImagePlugin.py b/Lib/site-packages/PIL/TgaImagePlugin.py
new file mode 100644
index 0000000..65c7484
--- /dev/null
+++ b/Lib/site-packages/PIL/TgaImagePlugin.py
@@ -0,0 +1,255 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# TGA file handling
+#
+# History:
+# 95-09-01 fl created (reads 24-bit files only)
+# 97-01-04 fl support more TGA versions, including compressed images
+# 98-07-04 fl fixed orientation and alpha layer bugs
+# 98-09-11 fl fixed orientation for runlength decoder
+#
+# Copyright (c) Secret Labs AB 1997-98.
+# Copyright (c) Fredrik Lundh 1995-97.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import warnings
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import i16le as i16
+from ._binary import o8
+from ._binary import o16le as o16
+
+#
+# --------------------------------------------------------------------
+# Read RGA file
+
+
+MODES = {
+ # map imagetype/depth to rawmode
+ (1, 8): "P",
+ (3, 1): "1",
+ (3, 8): "L",
+ (3, 16): "LA",
+ (2, 16): "BGR;5",
+ (2, 24): "BGR",
+ (2, 32): "BGRA",
+}
+
+
+##
+# Image plugin for Targa files.
+
+
+class TgaImageFile(ImageFile.ImageFile):
+ format = "TGA"
+ format_description = "Targa"
+
+ def _open(self):
+ # process header
+ s = self.fp.read(18)
+
+ id_len = s[0]
+
+ colormaptype = s[1]
+ imagetype = s[2]
+
+ depth = s[16]
+
+ flags = s[17]
+
+ self._size = i16(s, 12), i16(s, 14)
+
+ # validate header fields
+ if (
+ colormaptype not in (0, 1)
+ or self.size[0] <= 0
+ or self.size[1] <= 0
+ or depth not in (1, 8, 16, 24, 32)
+ ):
+ msg = "not a TGA file"
+ raise SyntaxError(msg)
+
+ # image mode
+ if imagetype in (3, 11):
+ self._mode = "L"
+ if depth == 1:
+ self._mode = "1" # ???
+ elif depth == 16:
+ self._mode = "LA"
+ elif imagetype in (1, 9):
+ self._mode = "P"
+ elif imagetype in (2, 10):
+ self._mode = "RGB"
+ if depth == 32:
+ self._mode = "RGBA"
+ else:
+ msg = "unknown TGA mode"
+ raise SyntaxError(msg)
+
+ # orientation
+ orientation = flags & 0x30
+ self._flip_horizontally = orientation in [0x10, 0x30]
+ if orientation in [0x20, 0x30]:
+ orientation = 1
+ elif orientation in [0, 0x10]:
+ orientation = -1
+ else:
+ msg = "unknown TGA orientation"
+ raise SyntaxError(msg)
+
+ self.info["orientation"] = orientation
+
+ if imagetype & 8:
+ self.info["compression"] = "tga_rle"
+
+ if id_len:
+ self.info["id_section"] = self.fp.read(id_len)
+
+ if colormaptype:
+ # read palette
+ start, size, mapdepth = i16(s, 3), i16(s, 5), s[7]
+ if mapdepth == 16:
+ self.palette = ImagePalette.raw(
+ "BGR;15", b"\0" * 2 * start + self.fp.read(2 * size)
+ )
+ elif mapdepth == 24:
+ self.palette = ImagePalette.raw(
+ "BGR", b"\0" * 3 * start + self.fp.read(3 * size)
+ )
+ elif mapdepth == 32:
+ self.palette = ImagePalette.raw(
+ "BGRA", b"\0" * 4 * start + self.fp.read(4 * size)
+ )
+
+ # setup tile descriptor
+ try:
+ rawmode = MODES[(imagetype & 7, depth)]
+ if imagetype & 8:
+ # compressed
+ self.tile = [
+ (
+ "tga_rle",
+ (0, 0) + self.size,
+ self.fp.tell(),
+ (rawmode, orientation, depth),
+ )
+ ]
+ else:
+ self.tile = [
+ (
+ "raw",
+ (0, 0) + self.size,
+ self.fp.tell(),
+ (rawmode, 0, orientation),
+ )
+ ]
+ except KeyError:
+ pass # cannot decode
+
+ def load_end(self):
+ if self._flip_horizontally:
+ self.im = self.im.transpose(Image.Transpose.FLIP_LEFT_RIGHT)
+
+
+#
+# --------------------------------------------------------------------
+# Write TGA file
+
+
+SAVE = {
+ "1": ("1", 1, 0, 3),
+ "L": ("L", 8, 0, 3),
+ "LA": ("LA", 16, 0, 3),
+ "P": ("P", 8, 1, 1),
+ "RGB": ("BGR", 24, 0, 2),
+ "RGBA": ("BGRA", 32, 0, 2),
+}
+
+
+def _save(im, fp, filename):
+ try:
+ rawmode, bits, colormaptype, imagetype = SAVE[im.mode]
+ except KeyError as e:
+ msg = f"cannot write mode {im.mode} as TGA"
+ raise OSError(msg) from e
+
+ if "rle" in im.encoderinfo:
+ rle = im.encoderinfo["rle"]
+ else:
+ compression = im.encoderinfo.get("compression", im.info.get("compression"))
+ rle = compression == "tga_rle"
+ if rle:
+ imagetype += 8
+
+ id_section = im.encoderinfo.get("id_section", im.info.get("id_section", ""))
+ id_len = len(id_section)
+ if id_len > 255:
+ id_len = 255
+ id_section = id_section[:255]
+ warnings.warn("id_section has been trimmed to 255 characters")
+
+ if colormaptype:
+ palette = im.im.getpalette("RGB", "BGR")
+ colormaplength, colormapentry = len(palette) // 3, 24
+ else:
+ colormaplength, colormapentry = 0, 0
+
+ if im.mode in ("LA", "RGBA"):
+ flags = 8
+ else:
+ flags = 0
+
+ orientation = im.encoderinfo.get("orientation", im.info.get("orientation", -1))
+ if orientation > 0:
+ flags = flags | 0x20
+
+ fp.write(
+ o8(id_len)
+ + o8(colormaptype)
+ + o8(imagetype)
+ + o16(0) # colormapfirst
+ + o16(colormaplength)
+ + o8(colormapentry)
+ + o16(0)
+ + o16(0)
+ + o16(im.size[0])
+ + o16(im.size[1])
+ + o8(bits)
+ + o8(flags)
+ )
+
+ if id_section:
+ fp.write(id_section)
+
+ if colormaptype:
+ fp.write(palette)
+
+ if rle:
+ ImageFile._save(
+ im, fp, [("tga_rle", (0, 0) + im.size, 0, (rawmode, orientation))]
+ )
+ else:
+ ImageFile._save(
+ im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))]
+ )
+
+ # write targa version 2 footer
+ fp.write(b"\000" * 8 + b"TRUEVISION-XFILE." + b"\000")
+
+
+#
+# --------------------------------------------------------------------
+# Registry
+
+
+Image.register_open(TgaImageFile.format, TgaImageFile)
+Image.register_save(TgaImageFile.format, _save)
+
+Image.register_extensions(TgaImageFile.format, [".tga", ".icb", ".vda", ".vst"])
+
+Image.register_mime(TgaImageFile.format, "image/x-tga")
diff --git a/Lib/site-packages/PIL/TiffImagePlugin.py b/Lib/site-packages/PIL/TiffImagePlugin.py
new file mode 100644
index 0000000..e20d4d5
--- /dev/null
+++ b/Lib/site-packages/PIL/TiffImagePlugin.py
@@ -0,0 +1,2159 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# TIFF file handling
+#
+# TIFF is a flexible, if somewhat aged, image file format originally
+# defined by Aldus. Although TIFF supports a wide variety of pixel
+# layouts and compression methods, the name doesn't really stand for
+# "thousands of incompatible file formats," it just feels that way.
+#
+# To read TIFF data from a stream, the stream must be seekable. For
+# progressive decoding, make sure to use TIFF files where the tag
+# directory is placed first in the file.
+#
+# History:
+# 1995-09-01 fl Created
+# 1996-05-04 fl Handle JPEGTABLES tag
+# 1996-05-18 fl Fixed COLORMAP support
+# 1997-01-05 fl Fixed PREDICTOR support
+# 1997-08-27 fl Added support for rational tags (from Perry Stoll)
+# 1998-01-10 fl Fixed seek/tell (from Jan Blom)
+# 1998-07-15 fl Use private names for internal variables
+# 1999-06-13 fl Rewritten for PIL 1.0 (1.0)
+# 2000-10-11 fl Additional fixes for Python 2.0 (1.1)
+# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2)
+# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3)
+# 2001-12-18 fl Added workaround for broken Matrox library
+# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart)
+# 2003-05-19 fl Check FILLORDER tag
+# 2003-09-26 fl Added RGBa support
+# 2004-02-24 fl Added DPI support; fixed rational write support
+# 2005-02-07 fl Added workaround for broken Corel Draw 10 files
+# 2006-01-09 fl Added support for float/double tags (from Russell Nelson)
+#
+# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved.
+# Copyright (c) 1995-1997 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import io
+import itertools
+import logging
+import math
+import os
+import struct
+import warnings
+from collections.abc import MutableMapping
+from fractions import Fraction
+from numbers import Number, Rational
+
+from . import ExifTags, Image, ImageFile, ImageOps, ImagePalette, TiffTags
+from ._binary import i16be as i16
+from ._binary import i32be as i32
+from ._binary import o8
+from .TiffTags import TYPES
+
+logger = logging.getLogger(__name__)
+
+# Set these to true to force use of libtiff for reading or writing.
+READ_LIBTIFF = False
+WRITE_LIBTIFF = False
+IFD_LEGACY_API = True
+STRIP_SIZE = 65536
+
+II = b"II" # little-endian (Intel style)
+MM = b"MM" # big-endian (Motorola style)
+
+#
+# --------------------------------------------------------------------
+# Read TIFF files
+
+# a few tag names, just to make the code below a bit more readable
+IMAGEWIDTH = 256
+IMAGELENGTH = 257
+BITSPERSAMPLE = 258
+COMPRESSION = 259
+PHOTOMETRIC_INTERPRETATION = 262
+FILLORDER = 266
+IMAGEDESCRIPTION = 270
+STRIPOFFSETS = 273
+SAMPLESPERPIXEL = 277
+ROWSPERSTRIP = 278
+STRIPBYTECOUNTS = 279
+X_RESOLUTION = 282
+Y_RESOLUTION = 283
+PLANAR_CONFIGURATION = 284
+RESOLUTION_UNIT = 296
+TRANSFERFUNCTION = 301
+SOFTWARE = 305
+DATE_TIME = 306
+ARTIST = 315
+PREDICTOR = 317
+COLORMAP = 320
+TILEWIDTH = 322
+TILELENGTH = 323
+TILEOFFSETS = 324
+TILEBYTECOUNTS = 325
+SUBIFD = 330
+EXTRASAMPLES = 338
+SAMPLEFORMAT = 339
+JPEGTABLES = 347
+YCBCRSUBSAMPLING = 530
+REFERENCEBLACKWHITE = 532
+COPYRIGHT = 33432
+IPTC_NAA_CHUNK = 33723 # newsphoto properties
+PHOTOSHOP_CHUNK = 34377 # photoshop properties
+ICCPROFILE = 34675
+EXIFIFD = 34665
+XMP = 700
+JPEGQUALITY = 65537 # pseudo-tag by libtiff
+
+# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java
+IMAGEJ_META_DATA_BYTE_COUNTS = 50838
+IMAGEJ_META_DATA = 50839
+
+COMPRESSION_INFO = {
+ # Compression => pil compression name
+ 1: "raw",
+ 2: "tiff_ccitt",
+ 3: "group3",
+ 4: "group4",
+ 5: "tiff_lzw",
+ 6: "tiff_jpeg", # obsolete
+ 7: "jpeg",
+ 8: "tiff_adobe_deflate",
+ 32771: "tiff_raw_16", # 16-bit padding
+ 32773: "packbits",
+ 32809: "tiff_thunderscan",
+ 32946: "tiff_deflate",
+ 34676: "tiff_sgilog",
+ 34677: "tiff_sgilog24",
+ 34925: "lzma",
+ 50000: "zstd",
+ 50001: "webp",
+}
+
+COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()}
+
+OPEN_INFO = {
+ # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample,
+ # ExtraSamples) => mode, rawmode
+ (II, 0, (1,), 1, (1,), ()): ("1", "1;I"),
+ (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"),
+ (II, 0, (1,), 2, (1,), ()): ("1", "1;IR"),
+ (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"),
+ (II, 1, (1,), 1, (1,), ()): ("1", "1"),
+ (MM, 1, (1,), 1, (1,), ()): ("1", "1"),
+ (II, 1, (1,), 2, (1,), ()): ("1", "1;R"),
+ (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"),
+ (II, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
+ (MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
+ (II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"),
+ (MM, 0, (1,), 2, (2,), ()): ("L", "L;2IR"),
+ (II, 1, (1,), 1, (2,), ()): ("L", "L;2"),
+ (MM, 1, (1,), 1, (2,), ()): ("L", "L;2"),
+ (II, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
+ (MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
+ (II, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
+ (MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
+ (II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"),
+ (MM, 0, (1,), 2, (4,), ()): ("L", "L;4IR"),
+ (II, 1, (1,), 1, (4,), ()): ("L", "L;4"),
+ (MM, 1, (1,), 1, (4,), ()): ("L", "L;4"),
+ (II, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
+ (MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
+ (II, 0, (1,), 1, (8,), ()): ("L", "L;I"),
+ (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"),
+ (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"),
+ (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"),
+ (II, 1, (1,), 1, (8,), ()): ("L", "L"),
+ (MM, 1, (1,), 1, (8,), ()): ("L", "L"),
+ (II, 1, (2,), 1, (8,), ()): ("L", "L"),
+ (MM, 1, (2,), 1, (8,), ()): ("L", "L"),
+ (II, 1, (1,), 2, (8,), ()): ("L", "L;R"),
+ (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"),
+ (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"),
+ (II, 0, (1,), 1, (16,), ()): ("I;16", "I;16"),
+ (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"),
+ (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"),
+ (II, 1, (1,), 2, (16,), ()): ("I;16", "I;16R"),
+ (II, 1, (2,), 1, (16,), ()): ("I", "I;16S"),
+ (MM, 1, (2,), 1, (16,), ()): ("I", "I;16BS"),
+ (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"),
+ (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"),
+ (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"),
+ (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"),
+ (MM, 1, (2,), 1, (32,), ()): ("I", "I;32BS"),
+ (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"),
+ (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"),
+ (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
+ (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
+ (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
+ (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
+ (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
+ (MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
+ (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples
+ (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples
+ (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
+ (II, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
+ (II, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10
+ (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10
+ (II, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16L"),
+ (MM, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16B"),
+ (II, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16L"),
+ (MM, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16B"),
+ (II, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16L"),
+ (MM, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16B"),
+ (II, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16L"),
+ (MM, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16B"),
+ (II, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16L"),
+ (MM, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16B"),
+ (II, 3, (1,), 1, (1,), ()): ("P", "P;1"),
+ (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"),
+ (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"),
+ (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"),
+ (II, 3, (1,), 1, (2,), ()): ("P", "P;2"),
+ (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"),
+ (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"),
+ (MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"),
+ (II, 3, (1,), 1, (4,), ()): ("P", "P;4"),
+ (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"),
+ (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"),
+ (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"),
+ (II, 3, (1,), 1, (8,), ()): ("P", "P"),
+ (MM, 3, (1,), 1, (8,), ()): ("P", "P"),
+ (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"),
+ (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"),
+ (II, 3, (1,), 2, (8,), ()): ("P", "P;R"),
+ (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"),
+ (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
+ (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
+ (II, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
+ (MM, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
+ (II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
+ (MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
+ (II, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16L"),
+ (II, 6, (1,), 1, (8,), ()): ("L", "L"),
+ (MM, 6, (1,), 1, (8,), ()): ("L", "L"),
+ # JPEG compressed images handled by LibTiff and auto-converted to RGBX
+ # Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel
+ (II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
+ (MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
+ (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
+ (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
+}
+
+MAX_SAMPLESPERPIXEL = max(len(key_tp[4]) for key_tp in OPEN_INFO)
+
+PREFIXES = [
+ b"MM\x00\x2A", # Valid TIFF header with big-endian byte order
+ b"II\x2A\x00", # Valid TIFF header with little-endian byte order
+ b"MM\x2A\x00", # Invalid TIFF header, assume big-endian
+ b"II\x00\x2A", # Invalid TIFF header, assume little-endian
+ b"MM\x00\x2B", # BigTIFF with big-endian byte order
+ b"II\x2B\x00", # BigTIFF with little-endian byte order
+]
+
+
+def _accept(prefix):
+ return prefix[:4] in PREFIXES
+
+
+def _limit_rational(val, max_val):
+ inv = abs(val) > 1
+ n_d = IFDRational(1 / val if inv else val).limit_rational(max_val)
+ return n_d[::-1] if inv else n_d
+
+
+def _limit_signed_rational(val, max_val, min_val):
+ frac = Fraction(val)
+ n_d = frac.numerator, frac.denominator
+
+ if min(n_d) < min_val:
+ n_d = _limit_rational(val, abs(min_val))
+
+ if max(n_d) > max_val:
+ val = Fraction(*n_d)
+ n_d = _limit_rational(val, max_val)
+
+ return n_d
+
+
+##
+# Wrapper for TIFF IFDs.
+
+_load_dispatch = {}
+_write_dispatch = {}
+
+
+class IFDRational(Rational):
+ """Implements a rational class where 0/0 is a legal value to match
+ the in the wild use of exif rationals.
+
+ e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used
+ """
+
+ """ If the denominator is 0, store this as a float('nan'), otherwise store
+ as a fractions.Fraction(). Delegate as appropriate
+
+ """
+
+ __slots__ = ("_numerator", "_denominator", "_val")
+
+ def __init__(self, value, denominator=1):
+ """
+ :param value: either an integer numerator, a
+ float/rational/other number, or an IFDRational
+ :param denominator: Optional integer denominator
+ """
+ if isinstance(value, IFDRational):
+ self._numerator = value.numerator
+ self._denominator = value.denominator
+ self._val = value._val
+ return
+
+ if isinstance(value, Fraction):
+ self._numerator = value.numerator
+ self._denominator = value.denominator
+ else:
+ self._numerator = value
+ self._denominator = denominator
+
+ if denominator == 0:
+ self._val = float("nan")
+ elif denominator == 1:
+ self._val = Fraction(value)
+ else:
+ self._val = Fraction(value, denominator)
+
+ @property
+ def numerator(self):
+ return self._numerator
+
+ @property
+ def denominator(self):
+ return self._denominator
+
+ def limit_rational(self, max_denominator):
+ """
+
+ :param max_denominator: Integer, the maximum denominator value
+ :returns: Tuple of (numerator, denominator)
+ """
+
+ if self.denominator == 0:
+ return self.numerator, self.denominator
+
+ f = self._val.limit_denominator(max_denominator)
+ return f.numerator, f.denominator
+
+ def __repr__(self):
+ return str(float(self._val))
+
+ def __hash__(self):
+ return self._val.__hash__()
+
+ def __eq__(self, other):
+ val = self._val
+ if isinstance(other, IFDRational):
+ other = other._val
+ if isinstance(other, float):
+ val = float(val)
+ return val == other
+
+ def __getstate__(self):
+ return [self._val, self._numerator, self._denominator]
+
+ def __setstate__(self, state):
+ IFDRational.__init__(self, 0)
+ _val, _numerator, _denominator = state
+ self._val = _val
+ self._numerator = _numerator
+ self._denominator = _denominator
+
+ def _delegate(op):
+ def delegate(self, *args):
+ return getattr(self._val, op)(*args)
+
+ return delegate
+
+ """ a = ['add','radd', 'sub', 'rsub', 'mul', 'rmul',
+ 'truediv', 'rtruediv', 'floordiv', 'rfloordiv',
+ 'mod','rmod', 'pow','rpow', 'pos', 'neg',
+ 'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'bool',
+ 'ceil', 'floor', 'round']
+ print("\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a))
+ """
+
+ __add__ = _delegate("__add__")
+ __radd__ = _delegate("__radd__")
+ __sub__ = _delegate("__sub__")
+ __rsub__ = _delegate("__rsub__")
+ __mul__ = _delegate("__mul__")
+ __rmul__ = _delegate("__rmul__")
+ __truediv__ = _delegate("__truediv__")
+ __rtruediv__ = _delegate("__rtruediv__")
+ __floordiv__ = _delegate("__floordiv__")
+ __rfloordiv__ = _delegate("__rfloordiv__")
+ __mod__ = _delegate("__mod__")
+ __rmod__ = _delegate("__rmod__")
+ __pow__ = _delegate("__pow__")
+ __rpow__ = _delegate("__rpow__")
+ __pos__ = _delegate("__pos__")
+ __neg__ = _delegate("__neg__")
+ __abs__ = _delegate("__abs__")
+ __trunc__ = _delegate("__trunc__")
+ __lt__ = _delegate("__lt__")
+ __gt__ = _delegate("__gt__")
+ __le__ = _delegate("__le__")
+ __ge__ = _delegate("__ge__")
+ __bool__ = _delegate("__bool__")
+ __ceil__ = _delegate("__ceil__")
+ __floor__ = _delegate("__floor__")
+ __round__ = _delegate("__round__")
+ # Python >= 3.11
+ if hasattr(Fraction, "__int__"):
+ __int__ = _delegate("__int__")
+
+
+class ImageFileDirectory_v2(MutableMapping):
+ """This class represents a TIFF tag directory. To speed things up, we
+ don't decode tags unless they're asked for.
+
+ Exposes a dictionary interface of the tags in the directory::
+
+ ifd = ImageFileDirectory_v2()
+ ifd[key] = 'Some Data'
+ ifd.tagtype[key] = TiffTags.ASCII
+ print(ifd[key])
+ 'Some Data'
+
+ Individual values are returned as the strings or numbers, sequences are
+ returned as tuples of the values.
+
+ The tiff metadata type of each item is stored in a dictionary of
+ tag types in
+ :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types
+ are read from a tiff file, guessed from the type added, or added
+ manually.
+
+ Data Structures:
+
+ * ``self.tagtype = {}``
+
+ * Key: numerical TIFF tag number
+ * Value: integer corresponding to the data type from
+ :py:data:`.TiffTags.TYPES`
+
+ .. versionadded:: 3.0.0
+
+ 'Internal' data structures:
+
+ * ``self._tags_v2 = {}``
+
+ * Key: numerical TIFF tag number
+ * Value: decoded data, as tuple for multiple values
+
+ * ``self._tagdata = {}``
+
+ * Key: numerical TIFF tag number
+ * Value: undecoded byte string from file
+
+ * ``self._tags_v1 = {}``
+
+ * Key: numerical TIFF tag number
+ * Value: decoded data in the v1 format
+
+ Tags will be found in the private attributes ``self._tagdata``, and in
+ ``self._tags_v2`` once decoded.
+
+ ``self.legacy_api`` is a value for internal use, and shouldn't be changed
+ from outside code. In cooperation with
+ :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`, if ``legacy_api``
+ is true, then decoded tags will be populated into both ``_tags_v1`` and
+ ``_tags_v2``. ``_tags_v2`` will be used if this IFD is used in the TIFF
+ save routine. Tags should be read from ``_tags_v1`` if
+ ``legacy_api == true``.
+
+ """
+
+ def __init__(self, ifh=b"II\052\0\0\0\0\0", prefix=None, group=None):
+ """Initialize an ImageFileDirectory.
+
+ To construct an ImageFileDirectory from a real file, pass the 8-byte
+ magic header to the constructor. To only set the endianness, pass it
+ as the 'prefix' keyword argument.
+
+ :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets
+ endianness.
+ :param prefix: Override the endianness of the file.
+ """
+ if not _accept(ifh):
+ msg = f"not a TIFF file (header {repr(ifh)} not valid)"
+ raise SyntaxError(msg)
+ self._prefix = prefix if prefix is not None else ifh[:2]
+ if self._prefix == MM:
+ self._endian = ">"
+ elif self._prefix == II:
+ self._endian = "<"
+ else:
+ msg = "not a TIFF IFD"
+ raise SyntaxError(msg)
+ self._bigtiff = ifh[2] == 43
+ self.group = group
+ self.tagtype = {}
+ """ Dictionary of tag types """
+ self.reset()
+ (self.next,) = (
+ self._unpack("Q", ifh[8:]) if self._bigtiff else self._unpack("L", ifh[4:])
+ )
+ self._legacy_api = False
+
+ prefix = property(lambda self: self._prefix)
+ offset = property(lambda self: self._offset)
+ legacy_api = property(lambda self: self._legacy_api)
+
+ @legacy_api.setter
+ def legacy_api(self, value):
+ msg = "Not allowing setting of legacy api"
+ raise Exception(msg)
+
+ def reset(self):
+ self._tags_v1 = {} # will remain empty if legacy_api is false
+ self._tags_v2 = {} # main tag storage
+ self._tagdata = {}
+ self.tagtype = {} # added 2008-06-05 by Florian Hoech
+ self._next = None
+ self._offset = None
+
+ def __str__(self):
+ return str(dict(self))
+
+ def named(self):
+ """
+ :returns: dict of name|key: value
+
+ Returns the complete tag dictionary, with named tags where possible.
+ """
+ return {
+ TiffTags.lookup(code, self.group).name: value
+ for code, value in self.items()
+ }
+
+ def __len__(self):
+ return len(set(self._tagdata) | set(self._tags_v2))
+
+ def __getitem__(self, tag):
+ if tag not in self._tags_v2: # unpack on the fly
+ data = self._tagdata[tag]
+ typ = self.tagtype[tag]
+ size, handler = self._load_dispatch[typ]
+ self[tag] = handler(self, data, self.legacy_api) # check type
+ val = self._tags_v2[tag]
+ if self.legacy_api and not isinstance(val, (tuple, bytes)):
+ val = (val,)
+ return val
+
+ def __contains__(self, tag):
+ return tag in self._tags_v2 or tag in self._tagdata
+
+ def __setitem__(self, tag, value):
+ self._setitem(tag, value, self.legacy_api)
+
+ def _setitem(self, tag, value, legacy_api):
+ basetypes = (Number, bytes, str)
+
+ info = TiffTags.lookup(tag, self.group)
+ values = [value] if isinstance(value, basetypes) else value
+
+ if tag not in self.tagtype:
+ if info.type:
+ self.tagtype[tag] = info.type
+ else:
+ self.tagtype[tag] = TiffTags.UNDEFINED
+ if all(isinstance(v, IFDRational) for v in values):
+ self.tagtype[tag] = (
+ TiffTags.RATIONAL
+ if all(v >= 0 for v in values)
+ else TiffTags.SIGNED_RATIONAL
+ )
+ elif all(isinstance(v, int) for v in values):
+ if all(0 <= v < 2**16 for v in values):
+ self.tagtype[tag] = TiffTags.SHORT
+ elif all(-(2**15) < v < 2**15 for v in values):
+ self.tagtype[tag] = TiffTags.SIGNED_SHORT
+ else:
+ self.tagtype[tag] = (
+ TiffTags.LONG
+ if all(v >= 0 for v in values)
+ else TiffTags.SIGNED_LONG
+ )
+ elif all(isinstance(v, float) for v in values):
+ self.tagtype[tag] = TiffTags.DOUBLE
+ elif all(isinstance(v, str) for v in values):
+ self.tagtype[tag] = TiffTags.ASCII
+ elif all(isinstance(v, bytes) for v in values):
+ self.tagtype[tag] = TiffTags.BYTE
+
+ if self.tagtype[tag] == TiffTags.UNDEFINED:
+ values = [
+ v.encode("ascii", "replace") if isinstance(v, str) else v
+ for v in values
+ ]
+ elif self.tagtype[tag] == TiffTags.RATIONAL:
+ values = [float(v) if isinstance(v, int) else v for v in values]
+
+ is_ifd = self.tagtype[tag] == TiffTags.LONG and isinstance(values, dict)
+ if not is_ifd:
+ values = tuple(info.cvt_enum(value) for value in values)
+
+ dest = self._tags_v1 if legacy_api else self._tags_v2
+
+ # Three branches:
+ # Spec'd length == 1, Actual length 1, store as element
+ # Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed.
+ # No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple.
+ # Don't mess with the legacy api, since it's frozen.
+ if not is_ifd and (
+ (info.length == 1)
+ or self.tagtype[tag] == TiffTags.BYTE
+ or (info.length is None and len(values) == 1 and not legacy_api)
+ ):
+ # Don't mess with the legacy api, since it's frozen.
+ if legacy_api and self.tagtype[tag] in [
+ TiffTags.RATIONAL,
+ TiffTags.SIGNED_RATIONAL,
+ ]: # rationals
+ values = (values,)
+ try:
+ (dest[tag],) = values
+ except ValueError:
+ # We've got a builtin tag with 1 expected entry
+ warnings.warn(
+ f"Metadata Warning, tag {tag} had too many entries: "
+ f"{len(values)}, expected 1"
+ )
+ dest[tag] = values[0]
+
+ else:
+ # Spec'd length > 1 or undefined
+ # Unspec'd, and length > 1
+ dest[tag] = values
+
+ def __delitem__(self, tag):
+ self._tags_v2.pop(tag, None)
+ self._tags_v1.pop(tag, None)
+ self._tagdata.pop(tag, None)
+
+ def __iter__(self):
+ return iter(set(self._tagdata) | set(self._tags_v2))
+
+ def _unpack(self, fmt, data):
+ return struct.unpack(self._endian + fmt, data)
+
+ def _pack(self, fmt, *values):
+ return struct.pack(self._endian + fmt, *values)
+
+ def _register_loader(idx, size):
+ def decorator(func):
+ from .TiffTags import TYPES
+
+ if func.__name__.startswith("load_"):
+ TYPES[idx] = func.__name__[5:].replace("_", " ")
+ _load_dispatch[idx] = size, func # noqa: F821
+ return func
+
+ return decorator
+
+ def _register_writer(idx):
+ def decorator(func):
+ _write_dispatch[idx] = func # noqa: F821
+ return func
+
+ return decorator
+
+ def _register_basic(idx_fmt_name):
+ from .TiffTags import TYPES
+
+ idx, fmt, name = idx_fmt_name
+ TYPES[idx] = name
+ size = struct.calcsize("=" + fmt)
+ _load_dispatch[idx] = ( # noqa: F821
+ size,
+ lambda self, data, legacy_api=True: (
+ self._unpack(f"{len(data) // size}{fmt}", data)
+ ),
+ )
+ _write_dispatch[idx] = lambda self, *values: ( # noqa: F821
+ b"".join(self._pack(fmt, value) for value in values)
+ )
+
+ list(
+ map(
+ _register_basic,
+ [
+ (TiffTags.SHORT, "H", "short"),
+ (TiffTags.LONG, "L", "long"),
+ (TiffTags.SIGNED_BYTE, "b", "signed byte"),
+ (TiffTags.SIGNED_SHORT, "h", "signed short"),
+ (TiffTags.SIGNED_LONG, "l", "signed long"),
+ (TiffTags.FLOAT, "f", "float"),
+ (TiffTags.DOUBLE, "d", "double"),
+ (TiffTags.IFD, "L", "long"),
+ (TiffTags.LONG8, "Q", "long8"),
+ ],
+ )
+ )
+
+ @_register_loader(1, 1) # Basic type, except for the legacy API.
+ def load_byte(self, data, legacy_api=True):
+ return data
+
+ @_register_writer(1) # Basic type, except for the legacy API.
+ def write_byte(self, data):
+ if isinstance(data, IFDRational):
+ data = int(data)
+ if isinstance(data, int):
+ data = bytes((data,))
+ return data
+
+ @_register_loader(2, 1)
+ def load_string(self, data, legacy_api=True):
+ if data.endswith(b"\0"):
+ data = data[:-1]
+ return data.decode("latin-1", "replace")
+
+ @_register_writer(2)
+ def write_string(self, value):
+ # remerge of https://github.com/python-pillow/Pillow/pull/1416
+ if isinstance(value, int):
+ value = str(value)
+ if not isinstance(value, bytes):
+ value = value.encode("ascii", "replace")
+ return value + b"\0"
+
+ @_register_loader(5, 8)
+ def load_rational(self, data, legacy_api=True):
+ vals = self._unpack(f"{len(data) // 4}L", data)
+
+ def combine(a, b):
+ return (a, b) if legacy_api else IFDRational(a, b)
+
+ return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2]))
+
+ @_register_writer(5)
+ def write_rational(self, *values):
+ return b"".join(
+ self._pack("2L", *_limit_rational(frac, 2**32 - 1)) for frac in values
+ )
+
+ @_register_loader(7, 1)
+ def load_undefined(self, data, legacy_api=True):
+ return data
+
+ @_register_writer(7)
+ def write_undefined(self, value):
+ if isinstance(value, int):
+ value = str(value).encode("ascii", "replace")
+ return value
+
+ @_register_loader(10, 8)
+ def load_signed_rational(self, data, legacy_api=True):
+ vals = self._unpack(f"{len(data) // 4}l", data)
+
+ def combine(a, b):
+ return (a, b) if legacy_api else IFDRational(a, b)
+
+ return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2]))
+
+ @_register_writer(10)
+ def write_signed_rational(self, *values):
+ return b"".join(
+ self._pack("2l", *_limit_signed_rational(frac, 2**31 - 1, -(2**31)))
+ for frac in values
+ )
+
+ def _ensure_read(self, fp, size):
+ ret = fp.read(size)
+ if len(ret) != size:
+ msg = (
+ "Corrupt EXIF data. "
+ f"Expecting to read {size} bytes but only got {len(ret)}. "
+ )
+ raise OSError(msg)
+ return ret
+
+ def load(self, fp):
+ self.reset()
+ self._offset = fp.tell()
+
+ try:
+ tag_count = (
+ self._unpack("Q", self._ensure_read(fp, 8))
+ if self._bigtiff
+ else self._unpack("H", self._ensure_read(fp, 2))
+ )[0]
+ for i in range(tag_count):
+ tag, typ, count, data = (
+ self._unpack("HHQ8s", self._ensure_read(fp, 20))
+ if self._bigtiff
+ else self._unpack("HHL4s", self._ensure_read(fp, 12))
+ )
+
+ tagname = TiffTags.lookup(tag, self.group).name
+ typname = TYPES.get(typ, "unknown")
+ msg = f"tag: {tagname} ({tag}) - type: {typname} ({typ})"
+
+ try:
+ unit_size, handler = self._load_dispatch[typ]
+ except KeyError:
+ logger.debug("%s - unsupported type %s", msg, typ)
+ continue # ignore unsupported type
+ size = count * unit_size
+ if size > (8 if self._bigtiff else 4):
+ here = fp.tell()
+ (offset,) = self._unpack("Q" if self._bigtiff else "L", data)
+ msg += f" Tag Location: {here} - Data Location: {offset}"
+ fp.seek(offset)
+ data = ImageFile._safe_read(fp, size)
+ fp.seek(here)
+ else:
+ data = data[:size]
+
+ if len(data) != size:
+ warnings.warn(
+ "Possibly corrupt EXIF data. "
+ f"Expecting to read {size} bytes but only got {len(data)}."
+ f" Skipping tag {tag}"
+ )
+ logger.debug(msg)
+ continue
+
+ if not data:
+ logger.debug(msg)
+ continue
+
+ self._tagdata[tag] = data
+ self.tagtype[tag] = typ
+
+ msg += " - value: " + (
+ "" % size if size > 32 else repr(data)
+ )
+ logger.debug(msg)
+
+ (self.next,) = (
+ self._unpack("Q", self._ensure_read(fp, 8))
+ if self._bigtiff
+ else self._unpack("L", self._ensure_read(fp, 4))
+ )
+ except OSError as msg:
+ warnings.warn(str(msg))
+ return
+
+ def tobytes(self, offset=0):
+ # FIXME What about tagdata?
+ result = self._pack("H", len(self._tags_v2))
+
+ entries = []
+ offset = offset + len(result) + len(self._tags_v2) * 12 + 4
+ stripoffsets = None
+
+ # pass 1: convert tags to binary format
+ # always write tags in ascending order
+ for tag, value in sorted(self._tags_v2.items()):
+ if tag == STRIPOFFSETS:
+ stripoffsets = len(entries)
+ typ = self.tagtype.get(tag)
+ logger.debug("Tag %s, Type: %s, Value: %s", tag, typ, repr(value))
+ is_ifd = typ == TiffTags.LONG and isinstance(value, dict)
+ if is_ifd:
+ if self._endian == "<":
+ ifh = b"II\x2A\x00\x08\x00\x00\x00"
+ else:
+ ifh = b"MM\x00\x2A\x00\x00\x00\x08"
+ ifd = ImageFileDirectory_v2(ifh, group=tag)
+ values = self._tags_v2[tag]
+ for ifd_tag, ifd_value in values.items():
+ ifd[ifd_tag] = ifd_value
+ data = ifd.tobytes(offset)
+ else:
+ values = value if isinstance(value, tuple) else (value,)
+ data = self._write_dispatch[typ](self, *values)
+
+ tagname = TiffTags.lookup(tag, self.group).name
+ typname = "ifd" if is_ifd else TYPES.get(typ, "unknown")
+ msg = f"save: {tagname} ({tag}) - type: {typname} ({typ})"
+ msg += " - value: " + (
+ "" % len(data) if len(data) >= 16 else str(values)
+ )
+ logger.debug(msg)
+
+ # count is sum of lengths for string and arbitrary data
+ if is_ifd:
+ count = 1
+ elif typ in [TiffTags.BYTE, TiffTags.ASCII, TiffTags.UNDEFINED]:
+ count = len(data)
+ else:
+ count = len(values)
+ # figure out if data fits into the entry
+ if len(data) <= 4:
+ entries.append((tag, typ, count, data.ljust(4, b"\0"), b""))
+ else:
+ entries.append((tag, typ, count, self._pack("L", offset), data))
+ offset += (len(data) + 1) // 2 * 2 # pad to word
+
+ # update strip offset data to point beyond auxiliary data
+ if stripoffsets is not None:
+ tag, typ, count, value, data = entries[stripoffsets]
+ if data:
+ msg = "multistrip support not yet implemented"
+ raise NotImplementedError(msg)
+ value = self._pack("L", self._unpack("L", value)[0] + offset)
+ entries[stripoffsets] = tag, typ, count, value, data
+
+ # pass 2: write entries to file
+ for tag, typ, count, value, data in entries:
+ logger.debug("%s %s %s %s %s", tag, typ, count, repr(value), repr(data))
+ result += self._pack("HHL4s", tag, typ, count, value)
+
+ # -- overwrite here for multi-page --
+ result += b"\0\0\0\0" # end of entries
+
+ # pass 3: write auxiliary data to file
+ for tag, typ, count, value, data in entries:
+ result += data
+ if len(data) & 1:
+ result += b"\0"
+
+ return result
+
+ def save(self, fp):
+ if fp.tell() == 0: # skip TIFF header on subsequent pages
+ # tiff header -- PIL always starts the first IFD at offset 8
+ fp.write(self._prefix + self._pack("HL", 42, 8))
+
+ offset = fp.tell()
+ result = self.tobytes(offset)
+ fp.write(result)
+ return offset + len(result)
+
+
+ImageFileDirectory_v2._load_dispatch = _load_dispatch
+ImageFileDirectory_v2._write_dispatch = _write_dispatch
+for idx, name in TYPES.items():
+ name = name.replace(" ", "_")
+ setattr(ImageFileDirectory_v2, "load_" + name, _load_dispatch[idx][1])
+ setattr(ImageFileDirectory_v2, "write_" + name, _write_dispatch[idx])
+del _load_dispatch, _write_dispatch, idx, name
+
+
+# Legacy ImageFileDirectory support.
+class ImageFileDirectory_v1(ImageFileDirectory_v2):
+ """This class represents the **legacy** interface to a TIFF tag directory.
+
+ Exposes a dictionary interface of the tags in the directory::
+
+ ifd = ImageFileDirectory_v1()
+ ifd[key] = 'Some Data'
+ ifd.tagtype[key] = TiffTags.ASCII
+ print(ifd[key])
+ ('Some Data',)
+
+ Also contains a dictionary of tag types as read from the tiff image file,
+ :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`.
+
+ Values are returned as a tuple.
+
+ .. deprecated:: 3.0.0
+ """
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._legacy_api = True
+
+ tags = property(lambda self: self._tags_v1)
+ tagdata = property(lambda self: self._tagdata)
+
+ # defined in ImageFileDirectory_v2
+ tagtype: dict
+ """Dictionary of tag types"""
+
+ @classmethod
+ def from_v2(cls, original):
+ """Returns an
+ :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
+ instance with the same data as is contained in the original
+ :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
+ instance.
+
+ :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
+
+ """
+
+ ifd = cls(prefix=original.prefix)
+ ifd._tagdata = original._tagdata
+ ifd.tagtype = original.tagtype
+ ifd.next = original.next # an indicator for multipage tiffs
+ return ifd
+
+ def to_v2(self):
+ """Returns an
+ :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
+ instance with the same data as is contained in the original
+ :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
+ instance.
+
+ :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
+
+ """
+
+ ifd = ImageFileDirectory_v2(prefix=self.prefix)
+ ifd._tagdata = dict(self._tagdata)
+ ifd.tagtype = dict(self.tagtype)
+ ifd._tags_v2 = dict(self._tags_v2)
+ return ifd
+
+ def __contains__(self, tag):
+ return tag in self._tags_v1 or tag in self._tagdata
+
+ def __len__(self):
+ return len(set(self._tagdata) | set(self._tags_v1))
+
+ def __iter__(self):
+ return iter(set(self._tagdata) | set(self._tags_v1))
+
+ def __setitem__(self, tag, value):
+ for legacy_api in (False, True):
+ self._setitem(tag, value, legacy_api)
+
+ def __getitem__(self, tag):
+ if tag not in self._tags_v1: # unpack on the fly
+ data = self._tagdata[tag]
+ typ = self.tagtype[tag]
+ size, handler = self._load_dispatch[typ]
+ for legacy in (False, True):
+ self._setitem(tag, handler(self, data, legacy), legacy)
+ val = self._tags_v1[tag]
+ if not isinstance(val, (tuple, bytes)):
+ val = (val,)
+ return val
+
+
+# undone -- switch this pointer when IFD_LEGACY_API == False
+ImageFileDirectory = ImageFileDirectory_v1
+
+
+##
+# Image plugin for TIFF files.
+
+
+class TiffImageFile(ImageFile.ImageFile):
+ format = "TIFF"
+ format_description = "Adobe TIFF"
+ _close_exclusive_fp_after_loading = False
+
+ def __init__(self, fp=None, filename=None):
+ self.tag_v2 = None
+ """ Image file directory (tag dictionary) """
+
+ self.tag = None
+ """ Legacy tag entries """
+
+ super().__init__(fp, filename)
+
+ def _open(self):
+ """Open the first image in a TIFF file"""
+
+ # Header
+ ifh = self.fp.read(8)
+ if ifh[2] == 43:
+ ifh += self.fp.read(8)
+
+ self.tag_v2 = ImageFileDirectory_v2(ifh)
+
+ # legacy IFD entries will be filled in later
+ self.ifd = None
+
+ # setup frame pointers
+ self.__first = self.__next = self.tag_v2.next
+ self.__frame = -1
+ self._fp = self.fp
+ self._frame_pos = []
+ self._n_frames = None
+
+ logger.debug("*** TiffImageFile._open ***")
+ logger.debug("- __first: %s", self.__first)
+ logger.debug("- ifh: %s", repr(ifh)) # Use repr to avoid str(bytes)
+
+ # and load the first frame
+ self._seek(0)
+
+ @property
+ def n_frames(self):
+ if self._n_frames is None:
+ current = self.tell()
+ self._seek(len(self._frame_pos))
+ while self._n_frames is None:
+ self._seek(self.tell() + 1)
+ self.seek(current)
+ return self._n_frames
+
+ def seek(self, frame):
+ """Select a given frame as current image"""
+ if not self._seek_check(frame):
+ return
+ self._seek(frame)
+ # Create a new core image object on second and
+ # subsequent frames in the image. Image may be
+ # different size/mode.
+ Image._decompression_bomb_check(self.size)
+ self.im = Image.core.new(self.mode, self.size)
+
+ def _seek(self, frame):
+ self.fp = self._fp
+
+ # reset buffered io handle in case fp
+ # was passed to libtiff, invalidating the buffer
+ self.fp.tell()
+
+ while len(self._frame_pos) <= frame:
+ if not self.__next:
+ msg = "no more images in TIFF file"
+ raise EOFError(msg)
+ logger.debug(
+ "Seeking to frame %s, on frame %s, __next %s, location: %s",
+ frame,
+ self.__frame,
+ self.__next,
+ self.fp.tell(),
+ )
+ self.fp.seek(self.__next)
+ self._frame_pos.append(self.__next)
+ logger.debug("Loading tags, location: %s", self.fp.tell())
+ self.tag_v2.load(self.fp)
+ if self.tag_v2.next in self._frame_pos:
+ # This IFD has already been processed
+ # Declare this to be the end of the image
+ self.__next = 0
+ else:
+ self.__next = self.tag_v2.next
+ if self.__next == 0:
+ self._n_frames = frame + 1
+ if len(self._frame_pos) == 1:
+ self.is_animated = self.__next != 0
+ self.__frame += 1
+ self.fp.seek(self._frame_pos[frame])
+ self.tag_v2.load(self.fp)
+ self._reload_exif()
+ # fill the legacy tag/ifd entries
+ self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2)
+ self.__frame = frame
+ self._setup()
+
+ def tell(self):
+ """Return the current frame number"""
+ return self.__frame
+
+ def getxmp(self):
+ """
+ Returns a dictionary containing the XMP tags.
+ Requires defusedxml to be installed.
+
+ :returns: XMP tags in a dictionary.
+ """
+ return self._getxmp(self.tag_v2[XMP]) if XMP in self.tag_v2 else {}
+
+ def get_photoshop_blocks(self):
+ """
+ Returns a dictionary of Photoshop "Image Resource Blocks".
+ The keys are the image resource ID. For more information, see
+ https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577409_pgfId-1037727
+
+ :returns: Photoshop "Image Resource Blocks" in a dictionary.
+ """
+ blocks = {}
+ val = self.tag_v2.get(ExifTags.Base.ImageResources)
+ if val:
+ while val[:4] == b"8BIM":
+ id = i16(val[4:6])
+ n = math.ceil((val[6] + 1) / 2) * 2
+ size = i32(val[6 + n : 10 + n])
+ data = val[10 + n : 10 + n + size]
+ blocks[id] = {"data": data}
+
+ val = val[math.ceil((10 + n + size) / 2) * 2 :]
+ return blocks
+
+ def load(self):
+ if self.tile and self.use_load_libtiff:
+ return self._load_libtiff()
+ return super().load()
+
+ def load_end(self):
+ # allow closing if we're on the first frame, there's no next
+ # This is the ImageFile.load path only, libtiff specific below.
+ if not self.is_animated:
+ self._close_exclusive_fp_after_loading = True
+
+ # reset buffered io handle in case fp
+ # was passed to libtiff, invalidating the buffer
+ self.fp.tell()
+
+ # load IFD data from fp before it is closed
+ exif = self.getexif()
+ for key in TiffTags.TAGS_V2_GROUPS:
+ if key not in exif:
+ continue
+ exif.get_ifd(key)
+
+ ImageOps.exif_transpose(self, in_place=True)
+ if ExifTags.Base.Orientation in self.tag_v2:
+ del self.tag_v2[ExifTags.Base.Orientation]
+
+ def _load_libtiff(self):
+ """Overload method triggered when we detect a compressed tiff
+ Calls out to libtiff"""
+
+ Image.Image.load(self)
+
+ self.load_prepare()
+
+        if len(self.tile) != 1:
+ msg = "Not exactly one tile"
+ raise OSError(msg)
+
+ # (self._compression, (extents tuple),
+ # 0, (rawmode, self._compression, fp))
+ extents = self.tile[0][1]
+ args = list(self.tile[0][3])
+
+ # To be nice on memory footprint, if there's a
+ # file descriptor, use that instead of reading
+ # into a string in python.
+ try:
+ fp = hasattr(self.fp, "fileno") and self.fp.fileno()
+ # flush the file descriptor, prevents error on pypy 2.4+
+ # should also eliminate the need for fp.tell
+ # in _seek
+ if hasattr(self.fp, "flush"):
+ self.fp.flush()
+ except OSError:
+            # io.BytesIO has a fileno, but it raises an OSError if there
+            # is no underlying file descriptor.
+ fp = False
+
+ if fp:
+ args[2] = fp
+
+ decoder = Image._getdecoder(
+ self.mode, "libtiff", tuple(args), self.decoderconfig
+ )
+ try:
+ decoder.setimage(self.im, extents)
+ except ValueError as e:
+ msg = "Couldn't set the image"
+ raise OSError(msg) from e
+
+ close_self_fp = self._exclusive_fp and not self.is_animated
+ if hasattr(self.fp, "getvalue"):
+ # We've got a stringio like thing passed in. Yay for all in memory.
+ # The decoder needs the entire file in one shot, so there's not
+ # a lot we can do here other than give it the entire file.
+ # unless we could do something like get the address of the
+ # underlying string for stringio.
+ #
+ # Rearranging for supporting byteio items, since they have a fileno
+ # that returns an OSError if there's no underlying fp. Easier to
+ # deal with here by reordering.
+ logger.debug("have getvalue. just sending in a string from getvalue")
+ n, err = decoder.decode(self.fp.getvalue())
+ elif fp:
+            # we've got an actual file on disk, pass in the fp.
+ logger.debug("have fileno, calling fileno version of the decoder.")
+ if not close_self_fp:
+ self.fp.seek(0)
+ # 4 bytes, otherwise the trace might error out
+ n, err = decoder.decode(b"fpfp")
+ else:
+ # we have something else.
+ logger.debug("don't have fileno or getvalue. just reading")
+ self.fp.seek(0)
+ # UNDONE -- so much for that buffer size thing.
+ n, err = decoder.decode(self.fp.read())
+
+ self.tile = []
+ self.readonly = 0
+
+ self.load_end()
+
+ if close_self_fp:
+ self.fp.close()
+ self.fp = None # might be shared
+
+ if err < 0:
+ raise OSError(err)
+
+ return Image.Image.load(self)
+
+ def _setup(self):
+ """Setup this image object based on current tags"""
+
+ if 0xBC01 in self.tag_v2:
+ msg = "Windows Media Photo files not yet supported"
+ raise OSError(msg)
+
+ # extract relevant tags
+ self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)]
+ self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1)
+
+ # photometric is a required tag, but not everyone is reading
+ # the specification
+ photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0)
+
+ # old style jpeg compression images most certainly are YCbCr
+ if self._compression == "tiff_jpeg":
+ photo = 6
+
+ fillorder = self.tag_v2.get(FILLORDER, 1)
+
+ logger.debug("*** Summary ***")
+ logger.debug("- compression: %s", self._compression)
+ logger.debug("- photometric_interpretation: %s", photo)
+ logger.debug("- planar_configuration: %s", self._planar_configuration)
+ logger.debug("- fill_order: %s", fillorder)
+ logger.debug("- YCbCr subsampling: %s", self.tag.get(YCBCRSUBSAMPLING))
+
+ # size
+ xsize = int(self.tag_v2.get(IMAGEWIDTH))
+ ysize = int(self.tag_v2.get(IMAGELENGTH))
+ self._size = xsize, ysize
+
+ logger.debug("- size: %s", self.size)
+
+ sample_format = self.tag_v2.get(SAMPLEFORMAT, (1,))
+ if len(sample_format) > 1 and max(sample_format) == min(sample_format) == 1:
+ # SAMPLEFORMAT is properly per band, so an RGB image will
+ # be (1,1,1). But, we don't support per band pixel types,
+ # and anything more than one band is a uint8. So, just
+ # take the first element. Revisit this if adding support
+ # for more exotic images.
+ sample_format = (1,)
+
+ bps_tuple = self.tag_v2.get(BITSPERSAMPLE, (1,))
+ extra_tuple = self.tag_v2.get(EXTRASAMPLES, ())
+ if photo in (2, 6, 8): # RGB, YCbCr, LAB
+ bps_count = 3
+ elif photo == 5: # CMYK
+ bps_count = 4
+ else:
+ bps_count = 1
+ bps_count += len(extra_tuple)
+ bps_actual_count = len(bps_tuple)
+ samples_per_pixel = self.tag_v2.get(
+ SAMPLESPERPIXEL,
+ 3 if self._compression == "tiff_jpeg" and photo in (2, 6) else 1,
+ )
+
+ if samples_per_pixel > MAX_SAMPLESPERPIXEL:
+ # DOS check, samples_per_pixel can be a Long, and we extend the tuple below
+ logger.error(
+ "More samples per pixel than can be decoded: %s", samples_per_pixel
+ )
+ msg = "Invalid value for samples per pixel"
+ raise SyntaxError(msg)
+
+ if samples_per_pixel < bps_actual_count:
+ # If a file has more values in bps_tuple than expected,
+ # remove the excess.
+ bps_tuple = bps_tuple[:samples_per_pixel]
+ elif samples_per_pixel > bps_actual_count and bps_actual_count == 1:
+ # If a file has only one value in bps_tuple, when it should have more,
+ # presume it is the same number of bits for all of the samples.
+ bps_tuple = bps_tuple * samples_per_pixel
+
+ if len(bps_tuple) != samples_per_pixel:
+ msg = "unknown data organization"
+ raise SyntaxError(msg)
+
+ # mode: check photometric interpretation and bits per pixel
+ key = (
+ self.tag_v2.prefix,
+ photo,
+ sample_format,
+ fillorder,
+ bps_tuple,
+ extra_tuple,
+ )
+ logger.debug("format key: %s", key)
+ try:
+ self._mode, rawmode = OPEN_INFO[key]
+ except KeyError as e:
+ logger.debug("- unsupported format")
+ msg = "unknown pixel mode"
+ raise SyntaxError(msg) from e
+
+ logger.debug("- raw mode: %s", rawmode)
+ logger.debug("- pil mode: %s", self.mode)
+
+ self.info["compression"] = self._compression
+
+ xres = self.tag_v2.get(X_RESOLUTION, 1)
+ yres = self.tag_v2.get(Y_RESOLUTION, 1)
+
+ if xres and yres:
+ resunit = self.tag_v2.get(RESOLUTION_UNIT)
+ if resunit == 2: # dots per inch
+ self.info["dpi"] = (xres, yres)
+ elif resunit == 3: # dots per centimeter. convert to dpi
+ self.info["dpi"] = (xres * 2.54, yres * 2.54)
+            elif resunit is None:  # used to default to 1, but now 2
+ self.info["dpi"] = (xres, yres)
+ # For backward compatibility,
+ # we also preserve the old behavior
+ self.info["resolution"] = xres, yres
+ else: # No absolute unit of measurement
+ self.info["resolution"] = xres, yres
+
+ # build tile descriptors
+ x = y = layer = 0
+ self.tile = []
+ self.use_load_libtiff = READ_LIBTIFF or self._compression != "raw"
+ if self.use_load_libtiff:
+ # Decoder expects entire file as one tile.
+ # There's a buffer size limit in load (64k)
+ # so large g4 images will fail if we use that
+ # function.
+ #
+ # Setup the one tile for the whole image, then
+ # use the _load_libtiff function.
+
+ # libtiff handles the fillmode for us, so 1;IR should
+ # actually be 1;I. Including the R double reverses the
+ # bits, so stripes of the image are reversed. See
+ # https://github.com/python-pillow/Pillow/issues/279
+ if fillorder == 2:
+ # Replace fillorder with fillorder=1
+ key = key[:3] + (1,) + key[4:]
+ logger.debug("format key: %s", key)
+ # this should always work, since all the
+ # fillorder==2 modes have a corresponding
+ # fillorder=1 mode
+ self._mode, rawmode = OPEN_INFO[key]
+ # libtiff always returns the bytes in native order.
+ # we're expecting image byte order. So, if the rawmode
+ # contains I;16, we need to convert from native to image
+ # byte order.
+ if rawmode == "I;16":
+ rawmode = "I;16N"
+ if ";16B" in rawmode:
+ rawmode = rawmode.replace(";16B", ";16N")
+ if ";16L" in rawmode:
+ rawmode = rawmode.replace(";16L", ";16N")
+
+ # YCbCr images with new jpeg compression with pixels in one plane
+ # unpacked straight into RGB values
+ if (
+ photo == 6
+ and self._compression == "jpeg"
+ and self._planar_configuration == 1
+ ):
+ rawmode = "RGB"
+
+ # Offset in the tile tuple is 0, we go from 0,0 to
+ # w,h, and we only do this once -- eds
+ a = (rawmode, self._compression, False, self.tag_v2.offset)
+ self.tile.append(("libtiff", (0, 0, xsize, ysize), 0, a))
+
+ elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2:
+ # striped image
+ if STRIPOFFSETS in self.tag_v2:
+ offsets = self.tag_v2[STRIPOFFSETS]
+ h = self.tag_v2.get(ROWSPERSTRIP, ysize)
+ w = self.size[0]
+ else:
+ # tiled image
+ offsets = self.tag_v2[TILEOFFSETS]
+ w = self.tag_v2.get(TILEWIDTH)
+ h = self.tag_v2.get(TILELENGTH)
+
+ for offset in offsets:
+ if x + w > xsize:
+ stride = w * sum(bps_tuple) / 8 # bytes per line
+ else:
+ stride = 0
+
+ tile_rawmode = rawmode
+ if self._planar_configuration == 2:
+                    # each band on its own layer
+ tile_rawmode = rawmode[layer]
+ # adjust stride width accordingly
+ stride /= bps_count
+
+ a = (tile_rawmode, int(stride), 1)
+ self.tile.append(
+ (
+ self._compression,
+ (x, y, min(x + w, xsize), min(y + h, ysize)),
+ offset,
+ a,
+ )
+ )
+ x = x + w
+ if x >= self.size[0]:
+ x, y = 0, y + h
+ if y >= self.size[1]:
+ x = y = 0
+ layer += 1
+ else:
+ logger.debug("- unsupported data organization")
+ msg = "unknown data organization"
+ raise SyntaxError(msg)
+
+ # Fix up info.
+ if ICCPROFILE in self.tag_v2:
+ self.info["icc_profile"] = self.tag_v2[ICCPROFILE]
+
+ # fixup palette descriptor
+
+ if self.mode in ["P", "PA"]:
+ palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]]
+ self.palette = ImagePalette.raw("RGB;L", b"".join(palette))
+
+
+#
+# --------------------------------------------------------------------
+# Write TIFF files
+
+# little endian is default except for image modes with
+# explicit big endian byte-order
+
+SAVE_INFO = {
+ # mode => rawmode, byteorder, photometrics,
+ # sampleformat, bitspersample, extra
+ "1": ("1", II, 1, 1, (1,), None),
+ "L": ("L", II, 1, 1, (8,), None),
+ "LA": ("LA", II, 1, 1, (8, 8), 2),
+ "P": ("P", II, 3, 1, (8,), None),
+ "PA": ("PA", II, 3, 1, (8, 8), 2),
+ "I": ("I;32S", II, 1, 2, (32,), None),
+ "I;16": ("I;16", II, 1, 1, (16,), None),
+ "I;16S": ("I;16S", II, 1, 2, (16,), None),
+ "F": ("F;32F", II, 1, 3, (32,), None),
+ "RGB": ("RGB", II, 2, 1, (8, 8, 8), None),
+ "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0),
+ "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2),
+ "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None),
+ "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None),
+ "LAB": ("LAB", II, 8, 1, (8, 8, 8), None),
+ "I;32BS": ("I;32BS", MM, 1, 2, (32,), None),
+ "I;16B": ("I;16B", MM, 1, 1, (16,), None),
+ "I;16BS": ("I;16BS", MM, 1, 2, (16,), None),
+ "F;32BF": ("F;32BF", MM, 1, 3, (32,), None),
+}
+
+
+def _save(im, fp, filename):
+ try:
+ rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode]
+ except KeyError as e:
+ msg = f"cannot write mode {im.mode} as TIFF"
+ raise OSError(msg) from e
+
+ ifd = ImageFileDirectory_v2(prefix=prefix)
+
+ encoderinfo = im.encoderinfo
+ encoderconfig = im.encoderconfig
+ try:
+ compression = encoderinfo["compression"]
+ except KeyError:
+ compression = im.info.get("compression")
+ if isinstance(compression, int):
+ # compression value may be from BMP. Ignore it
+ compression = None
+ if compression is None:
+ compression = "raw"
+ elif compression == "tiff_jpeg":
+ # OJPEG is obsolete, so use new-style JPEG compression instead
+ compression = "jpeg"
+ elif compression == "tiff_deflate":
+ compression = "tiff_adobe_deflate"
+
+ libtiff = WRITE_LIBTIFF or compression != "raw"
+
+ # required for color libtiff images
+ ifd[PLANAR_CONFIGURATION] = 1
+
+ ifd[IMAGEWIDTH] = im.size[0]
+ ifd[IMAGELENGTH] = im.size[1]
+
+ # write any arbitrary tags passed in as an ImageFileDirectory
+ if "tiffinfo" in encoderinfo:
+ info = encoderinfo["tiffinfo"]
+ elif "exif" in encoderinfo:
+ info = encoderinfo["exif"]
+ if isinstance(info, bytes):
+ exif = Image.Exif()
+ exif.load(info)
+ info = exif
+ else:
+ info = {}
+ logger.debug("Tiffinfo Keys: %s", list(info))
+ if isinstance(info, ImageFileDirectory_v1):
+ info = info.to_v2()
+ for key in info:
+ if isinstance(info, Image.Exif) and key in TiffTags.TAGS_V2_GROUPS:
+ ifd[key] = info.get_ifd(key)
+ else:
+ ifd[key] = info.get(key)
+ try:
+ ifd.tagtype[key] = info.tagtype[key]
+ except Exception:
+ pass # might not be an IFD. Might not have populated type
+
+ # additions written by Greg Couch, gregc@cgl.ucsf.edu
+ # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com
+ if hasattr(im, "tag_v2"):
+ # preserve tags from original TIFF image file
+ for key in (
+ RESOLUTION_UNIT,
+ X_RESOLUTION,
+ Y_RESOLUTION,
+ IPTC_NAA_CHUNK,
+ PHOTOSHOP_CHUNK,
+ XMP,
+ ):
+ if key in im.tag_v2:
+ ifd[key] = im.tag_v2[key]
+ ifd.tagtype[key] = im.tag_v2.tagtype[key]
+
+ # preserve ICC profile (should also work when saving other formats
+ # which support profiles as TIFF) -- 2008-06-06 Florian Hoech
+ icc = encoderinfo.get("icc_profile", im.info.get("icc_profile"))
+ if icc:
+ ifd[ICCPROFILE] = icc
+
+ for key, name in [
+ (IMAGEDESCRIPTION, "description"),
+ (X_RESOLUTION, "resolution"),
+ (Y_RESOLUTION, "resolution"),
+ (X_RESOLUTION, "x_resolution"),
+ (Y_RESOLUTION, "y_resolution"),
+ (RESOLUTION_UNIT, "resolution_unit"),
+ (SOFTWARE, "software"),
+ (DATE_TIME, "date_time"),
+ (ARTIST, "artist"),
+ (COPYRIGHT, "copyright"),
+ ]:
+ if name in encoderinfo:
+ ifd[key] = encoderinfo[name]
+
+ dpi = encoderinfo.get("dpi")
+ if dpi:
+ ifd[RESOLUTION_UNIT] = 2
+ ifd[X_RESOLUTION] = dpi[0]
+ ifd[Y_RESOLUTION] = dpi[1]
+
+ if bits != (1,):
+ ifd[BITSPERSAMPLE] = bits
+ if len(bits) != 1:
+ ifd[SAMPLESPERPIXEL] = len(bits)
+ if extra is not None:
+ ifd[EXTRASAMPLES] = extra
+ if format != 1:
+ ifd[SAMPLEFORMAT] = format
+
+ if PHOTOMETRIC_INTERPRETATION not in ifd:
+ ifd[PHOTOMETRIC_INTERPRETATION] = photo
+ elif im.mode in ("1", "L") and ifd[PHOTOMETRIC_INTERPRETATION] == 0:
+ if im.mode == "1":
+ inverted_im = im.copy()
+ px = inverted_im.load()
+ for y in range(inverted_im.height):
+ for x in range(inverted_im.width):
+ px[x, y] = 0 if px[x, y] == 255 else 255
+ im = inverted_im
+ else:
+ im = ImageOps.invert(im)
+
+ if im.mode in ["P", "PA"]:
+ lut = im.im.getpalette("RGB", "RGB;L")
+ colormap = []
+ colors = len(lut) // 3
+ for i in range(3):
+ colormap += [v * 256 for v in lut[colors * i : colors * (i + 1)]]
+ colormap += [0] * (256 - colors)
+ ifd[COLORMAP] = colormap
+ # data orientation
+ w, h = ifd[IMAGEWIDTH], ifd[IMAGELENGTH]
+ stride = len(bits) * ((w * bits[0] + 7) // 8)
+ if ROWSPERSTRIP not in ifd:
+ # aim for given strip size (64 KB by default) when using libtiff writer
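+        # (e.g. an 8-bit RGB image 1000 px wide has a 3000-byte stride, so
+        # the default 64 KB strip size gives 65536 // 3000 = 21 rows)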
+ if libtiff:
+ im_strip_size = encoderinfo.get("strip_size", STRIP_SIZE)
+ rows_per_strip = 1 if stride == 0 else min(im_strip_size // stride, h)
+ # JPEG encoder expects multiple of 8 rows
+ if compression == "jpeg":
+ rows_per_strip = min(((rows_per_strip + 7) // 8) * 8, h)
+ else:
+ rows_per_strip = h
+ if rows_per_strip == 0:
+ rows_per_strip = 1
+ ifd[ROWSPERSTRIP] = rows_per_strip
+ strip_byte_counts = 1 if stride == 0 else stride * ifd[ROWSPERSTRIP]
+ strips_per_image = (h + ifd[ROWSPERSTRIP] - 1) // ifd[ROWSPERSTRIP]
+ if strip_byte_counts >= 2**16:
+ ifd.tagtype[STRIPBYTECOUNTS] = TiffTags.LONG
+ ifd[STRIPBYTECOUNTS] = (strip_byte_counts,) * (strips_per_image - 1) + (
+ stride * h - strip_byte_counts * (strips_per_image - 1),
+ )
+ ifd[STRIPOFFSETS] = tuple(
+ range(0, strip_byte_counts * strips_per_image, strip_byte_counts)
+ ) # this is adjusted by IFD writer
+ # no compression by default:
+ ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1)
+
+ if im.mode == "YCbCr":
+ for tag, value in {
+ YCBCRSUBSAMPLING: (1, 1),
+ REFERENCEBLACKWHITE: (0, 255, 128, 255, 128, 255),
+ }.items():
+ ifd.setdefault(tag, value)
+
+ blocklist = [TILEWIDTH, TILELENGTH, TILEOFFSETS, TILEBYTECOUNTS]
+ if libtiff:
+ if "quality" in encoderinfo:
+ quality = encoderinfo["quality"]
+ if not isinstance(quality, int) or quality < 0 or quality > 100:
+ msg = "Invalid quality setting"
+ raise ValueError(msg)
+ if compression != "jpeg":
+ msg = "quality setting only supported for 'jpeg' compression"
+ raise ValueError(msg)
+ ifd[JPEGQUALITY] = quality
+
+ logger.debug("Saving using libtiff encoder")
+ logger.debug("Items: %s", sorted(ifd.items()))
+ _fp = 0
+ if hasattr(fp, "fileno"):
+ try:
+ fp.seek(0)
+ _fp = os.dup(fp.fileno())
+ except io.UnsupportedOperation:
+ pass
+
+ # optional types for non core tags
+ types = {}
+ # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library
+ # based on the data in the strip.
+ # The other tags expect arrays with a certain length (fixed or depending on
+ # BITSPERSAMPLE, etc), passing arrays with a different length will result in
+ # segfaults. Block these tags until we add extra validation.
+ # SUBIFD may also cause a segfault.
+ blocklist += [
+ REFERENCEBLACKWHITE,
+ STRIPBYTECOUNTS,
+ STRIPOFFSETS,
+ TRANSFERFUNCTION,
+ SUBIFD,
+ ]
+
+ # bits per sample is a single short in the tiff directory, not a list.
+ atts = {BITSPERSAMPLE: bits[0]}
+ # Merge the ones that we have with (optional) more bits from
+        # the original file, e.g. x,y resolution, so that we can
+ # save(load('')) == original file.
+ legacy_ifd = {}
+ if hasattr(im, "tag"):
+ legacy_ifd = im.tag.to_v2()
+
+ # SAMPLEFORMAT is determined by the image format and should not be copied
+ # from legacy_ifd.
+ supplied_tags = {**getattr(im, "tag_v2", {}), **legacy_ifd}
+ if SAMPLEFORMAT in supplied_tags:
+ del supplied_tags[SAMPLEFORMAT]
+
+ for tag, value in itertools.chain(ifd.items(), supplied_tags.items()):
+ # Libtiff can only process certain core items without adding
+ # them to the custom dictionary.
+ # Custom items are supported for int, float, unicode, string and byte
+ # values. Other types and tuples require a tagtype.
+ if tag not in TiffTags.LIBTIFF_CORE:
+ if not getattr(Image.core, "libtiff_support_custom_tags", False):
+ continue
+
+ if tag in ifd.tagtype:
+ types[tag] = ifd.tagtype[tag]
+ elif not (isinstance(value, (int, float, str, bytes))):
+ continue
+ else:
+ type = TiffTags.lookup(tag).type
+ if type:
+ types[tag] = type
+ if tag not in atts and tag not in blocklist:
+ if isinstance(value, str):
+ atts[tag] = value.encode("ascii", "replace") + b"\0"
+ elif isinstance(value, IFDRational):
+ atts[tag] = float(value)
+ else:
+ atts[tag] = value
+
+ if SAMPLEFORMAT in atts and len(atts[SAMPLEFORMAT]) == 1:
+ atts[SAMPLEFORMAT] = atts[SAMPLEFORMAT][0]
+
+ logger.debug("Converted items: %s", sorted(atts.items()))
+
+ # libtiff always expects the bytes in native order.
+ # we're storing image byte order. So, if the rawmode
+ # contains I;16, we need to convert from native to image
+ # byte order.
+ if im.mode in ("I;16B", "I;16"):
+ rawmode = "I;16N"
+
+ # Pass tags as sorted list so that the tags are set in a fixed order.
+ # This is required by libtiff for some tags. For example, the JPEGQUALITY
+ # pseudo tag requires that the COMPRESS tag was already set.
+ tags = list(atts.items())
+ tags.sort()
+ a = (rawmode, compression, _fp, filename, tags, types)
+ e = Image._getencoder(im.mode, "libtiff", a, encoderconfig)
+ e.setimage(im.im, (0, 0) + im.size)
+ while True:
+ # undone, change to self.decodermaxblock:
+ errcode, data = e.encode(16 * 1024)[1:]
+ if not _fp:
+ fp.write(data)
+ if errcode:
+ break
+ if _fp:
+ try:
+ os.close(_fp)
+ except OSError:
+ pass
+ if errcode < 0:
+ msg = f"encoder error {errcode} when writing image file"
+ raise OSError(msg)
+
+ else:
+ for tag in blocklist:
+ del ifd[tag]
+ offset = ifd.save(fp)
+
+ ImageFile._save(
+ im, fp, [("raw", (0, 0) + im.size, offset, (rawmode, stride, 1))]
+ )
+
+ # -- helper for multi-page save --
+ if "_debug_multipage" in encoderinfo:
+ # just to access o32 and o16 (using correct byte order)
+ im._debug_multipage = ifd
+
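+# Usage sketch (illustrative, not part of the plugin): a typical save through
+# this module; the file names and tag value are hypothetical, and tag 270 is
+# ImageDescription.
+#
+#   from PIL import Image
+#   im = Image.open("in.png")
+#   im.save("out.tif", compression="tiff_lzw",
+#           tiffinfo={270: "described image"}, dpi=(300, 300))
+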
+
+class AppendingTiffWriter:
+ fieldSizes = [
+ 0, # None
+ 1, # byte
+ 1, # ascii
+ 2, # short
+ 4, # long
+ 8, # rational
+ 1, # sbyte
+ 1, # undefined
+ 2, # sshort
+ 4, # slong
+ 8, # srational
+ 4, # float
+ 8, # double
+ 4, # ifd
+ 2, # unicode
+ 4, # complex
+ 8, # long8
+ ]
+
+ Tags = {
+ 273, # StripOffsets
+ 288, # FreeOffsets
+ 324, # TileOffsets
+ 519, # JPEGQTables
+ 520, # JPEGDCTables
+ 521, # JPEGACTables
+ }
+
+ def __init__(self, fn, new=False):
+ if hasattr(fn, "read"):
+ self.f = fn
+ self.close_fp = False
+ else:
+ self.name = fn
+ self.close_fp = True
+ try:
+ self.f = open(fn, "w+b" if new else "r+b")
+ except OSError:
+ self.f = open(fn, "w+b")
+ self.beginning = self.f.tell()
+ self.setup()
+
+ def setup(self):
+ # Reset everything.
+ self.f.seek(self.beginning, os.SEEK_SET)
+
+ self.whereToWriteNewIFDOffset = None
+ self.offsetOfNewPage = 0
+
+ self.IIMM = iimm = self.f.read(4)
+ if not iimm:
+ # empty file - first page
+ self.isFirst = True
+ return
+
+ self.isFirst = False
+ if iimm == b"II\x2a\x00":
+ self.setEndian("<")
+ elif iimm == b"MM\x00\x2a":
+ self.setEndian(">")
+ else:
+ msg = "Invalid TIFF file header"
+ raise RuntimeError(msg)
+
+ self.skipIFDs()
+ self.goToEnd()
+
+ def finalize(self):
+ if self.isFirst:
+ return
+
+ # fix offsets
+ self.f.seek(self.offsetOfNewPage)
+
+ iimm = self.f.read(4)
+ if not iimm:
+ # Make it easy to finish a frame without committing to a new one.
+ return
+
+ if iimm != self.IIMM:
+ msg = "IIMM of new page doesn't match IIMM of first page"
+ raise RuntimeError(msg)
+
+ ifd_offset = self.readLong()
+ ifd_offset += self.offsetOfNewPage
+ self.f.seek(self.whereToWriteNewIFDOffset)
+ self.writeLong(ifd_offset)
+ self.f.seek(ifd_offset)
+ self.fixIFD()
+
+ def newFrame(self):
+ # Call this to finish a frame.
+ self.finalize()
+ self.setup()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ if self.close_fp:
+ self.close()
+ return False
+
+ def tell(self):
+ return self.f.tell() - self.offsetOfNewPage
+
+ def seek(self, offset, whence=io.SEEK_SET):
+ if whence == os.SEEK_SET:
+ offset += self.offsetOfNewPage
+
+ self.f.seek(offset, whence)
+ return self.tell()
+
+ def goToEnd(self):
+ self.f.seek(0, os.SEEK_END)
+ pos = self.f.tell()
+
+ # pad to 16 byte boundary
+ pad_bytes = 16 - pos % 16
+ if 0 < pad_bytes < 16:
+ self.f.write(bytes(pad_bytes))
+ self.offsetOfNewPage = self.f.tell()
+
+ def setEndian(self, endian):
+ self.endian = endian
+ self.longFmt = self.endian + "L"
+ self.shortFmt = self.endian + "H"
+ self.tagFormat = self.endian + "HHL"
+
+ def skipIFDs(self):
+ while True:
+ ifd_offset = self.readLong()
+ if ifd_offset == 0:
+ self.whereToWriteNewIFDOffset = self.f.tell() - 4
+ break
+
+ self.f.seek(ifd_offset)
+ num_tags = self.readShort()
+ self.f.seek(num_tags * 12, os.SEEK_CUR)
+
+ def write(self, data):
+ return self.f.write(data)
+
+ def readShort(self):
+ (value,) = struct.unpack(self.shortFmt, self.f.read(2))
+ return value
+
+ def readLong(self):
+ (value,) = struct.unpack(self.longFmt, self.f.read(4))
+ return value
+
+ def rewriteLastShortToLong(self, value):
+ self.f.seek(-2, os.SEEK_CUR)
+ bytes_written = self.f.write(struct.pack(self.longFmt, value))
+ if bytes_written is not None and bytes_written != 4:
+ msg = f"wrote only {bytes_written} bytes but wanted 4"
+ raise RuntimeError(msg)
+
+ def rewriteLastShort(self, value):
+ self.f.seek(-2, os.SEEK_CUR)
+ bytes_written = self.f.write(struct.pack(self.shortFmt, value))
+ if bytes_written is not None and bytes_written != 2:
+ msg = f"wrote only {bytes_written} bytes but wanted 2"
+ raise RuntimeError(msg)
+
+ def rewriteLastLong(self, value):
+ self.f.seek(-4, os.SEEK_CUR)
+ bytes_written = self.f.write(struct.pack(self.longFmt, value))
+ if bytes_written is not None and bytes_written != 4:
+ msg = f"wrote only {bytes_written} bytes but wanted 4"
+ raise RuntimeError(msg)
+
+ def writeShort(self, value):
+ bytes_written = self.f.write(struct.pack(self.shortFmt, value))
+ if bytes_written is not None and bytes_written != 2:
+ msg = f"wrote only {bytes_written} bytes but wanted 2"
+ raise RuntimeError(msg)
+
+ def writeLong(self, value):
+ bytes_written = self.f.write(struct.pack(self.longFmt, value))
+ if bytes_written is not None and bytes_written != 4:
+ msg = f"wrote only {bytes_written} bytes but wanted 4"
+ raise RuntimeError(msg)
+
+ def close(self):
+ self.finalize()
+ self.f.close()
+
+ def fixIFD(self):
+ num_tags = self.readShort()
+
+ for i in range(num_tags):
+ tag, field_type, count = struct.unpack(self.tagFormat, self.f.read(8))
+
+ field_size = self.fieldSizes[field_type]
+ total_size = field_size * count
+ is_local = total_size <= 4
+ if not is_local:
+ offset = self.readLong()
+ offset += self.offsetOfNewPage
+ self.rewriteLastLong(offset)
+
+ if tag in self.Tags:
+ cur_pos = self.f.tell()
+
+ if is_local:
+ self.fixOffsets(
+ count, isShort=(field_size == 2), isLong=(field_size == 4)
+ )
+ self.f.seek(cur_pos + 4)
+ else:
+ self.f.seek(offset)
+ self.fixOffsets(
+ count, isShort=(field_size == 2), isLong=(field_size == 4)
+ )
+ self.f.seek(cur_pos)
+
+ offset = cur_pos = None
+
+ elif is_local:
+ # skip the locally stored value that is not an offset
+ self.f.seek(4, os.SEEK_CUR)
+
+ def fixOffsets(self, count, isShort=False, isLong=False):
+ if not isShort and not isLong:
+ msg = "offset is neither short nor long"
+ raise RuntimeError(msg)
+
+ for i in range(count):
+ offset = self.readShort() if isShort else self.readLong()
+ offset += self.offsetOfNewPage
+ if isShort and offset >= 65536:
+ # offset is now too large - we must convert shorts to longs
+ if count != 1:
+ msg = "not implemented"
+ raise RuntimeError(msg) # XXX TODO
+
+ # simple case - the offset is just one and therefore it is
+ # local (not referenced with another offset)
+ self.rewriteLastShortToLong(offset)
+ self.f.seek(-10, os.SEEK_CUR)
+ self.writeShort(TiffTags.LONG) # rewrite the type to LONG
+ self.f.seek(8, os.SEEK_CUR)
+ elif isShort:
+ self.rewriteLastShort(offset)
+ else:
+ self.rewriteLastLong(offset)
+
+
+def _save_all(im, fp, filename):
+ encoderinfo = im.encoderinfo.copy()
+ encoderconfig = im.encoderconfig
+ append_images = list(encoderinfo.get("append_images", []))
+ if not hasattr(im, "n_frames") and not append_images:
+ return _save(im, fp, filename)
+
+ cur_idx = im.tell()
+ try:
+ with AppendingTiffWriter(fp) as tf:
+ for ims in [im] + append_images:
+ ims.encoderinfo = encoderinfo
+ ims.encoderconfig = encoderconfig
+ if not hasattr(ims, "n_frames"):
+ nfr = 1
+ else:
+ nfr = ims.n_frames
+
+ for idx in range(nfr):
+ ims.seek(idx)
+ ims.load()
+ _save(ims, tf, filename)
+ tf.newFrame()
+ finally:
+ im.seek(cur_idx)
+
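+# Usage sketch (illustrative): multi-page saves are routed through _save_all
+# and AppendingTiffWriter; the file name and frame list are hypothetical.
+#
+#   frames[0].save("pages.tif", save_all=True, append_images=frames[1:])
+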
+
+#
+# --------------------------------------------------------------------
+# Register
+
+Image.register_open(TiffImageFile.format, TiffImageFile, _accept)
+Image.register_save(TiffImageFile.format, _save)
+Image.register_save_all(TiffImageFile.format, _save_all)
+
+Image.register_extensions(TiffImageFile.format, [".tif", ".tiff"])
+
+Image.register_mime(TiffImageFile.format, "image/tiff")
diff --git a/Lib/site-packages/PIL/TiffTags.py b/Lib/site-packages/PIL/TiffTags.py
new file mode 100644
index 0000000..88ff2f4
--- /dev/null
+++ b/Lib/site-packages/PIL/TiffTags.py
@@ -0,0 +1,545 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# TIFF tags
+#
+# This module provides clear-text names for various well-known
+# TIFF tags. The TIFF codec works just fine without it.
+#
+# Copyright (c) Secret Labs AB 1999.
+#
+# See the README file for information on usage and redistribution.
+#
+
+##
+# This module provides constants and clear-text names for various
+# well-known TIFF tags.
+##
+from __future__ import annotations
+
+from collections import namedtuple
+
+
+class TagInfo(namedtuple("_TagInfo", "value name type length enum")):
+ __slots__ = []
+
+ def __new__(cls, value=None, name="unknown", type=None, length=None, enum=None):
+ return super().__new__(cls, value, name, type, length, enum or {})
+
+ def cvt_enum(self, value):
+ # Using get will call hash(value), which can be expensive
+ # for some types (e.g. Fraction). Since self.enum is rarely
+ # used, it's usually better to test it first.
+ return self.enum.get(value, value) if self.enum else value
+
+
+def lookup(tag, group=None):
+ """
+ :param tag: Integer tag number
+ :param group: Which :py:data:`~PIL.TiffTags.TAGS_V2_GROUPS` to look in
+
+ .. versionadded:: 8.3.0
+
+    :returns: A ``TagInfo`` namedtuple, from the ``TAGS_V2`` info if possible,
+        otherwise just populating the value and name from ``TAGS``.
+        If the tag is not recognized, "unknown" is returned for the name.
+
+ """
+
+ if group is not None:
+ info = TAGS_V2_GROUPS[group].get(tag) if group in TAGS_V2_GROUPS else None
+ else:
+ info = TAGS_V2.get(tag)
+ return info or TagInfo(tag, TAGS.get(tag, "unknown"))
+
+
+##
+# Map tag numbers to tag info.
+#
+# id: (Name, Type, Length[, enum_values])
+#
+# The length here differs from the length in the tiff spec. For
+# numbers, the tiff spec is for the number of fields returned. We
+# agree here. For string-like types, the tiff spec uses the length of
+# field in bytes. In Pillow, we are using the number of expected
+# fields, in general 1 for string-like types.
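+#
+# For example, XResolution (282) below is RATIONAL with length 1 (a single
+# fraction), while BitsPerSample (258) uses length 0 for a variable number
+# of fields.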
+
+
+BYTE = 1
+ASCII = 2
+SHORT = 3
+LONG = 4
+RATIONAL = 5
+SIGNED_BYTE = 6
+UNDEFINED = 7
+SIGNED_SHORT = 8
+SIGNED_LONG = 9
+SIGNED_RATIONAL = 10
+FLOAT = 11
+DOUBLE = 12
+IFD = 13
+LONG8 = 16
+
+TAGS_V2 = {
+ 254: ("NewSubfileType", LONG, 1),
+ 255: ("SubfileType", SHORT, 1),
+ 256: ("ImageWidth", LONG, 1),
+ 257: ("ImageLength", LONG, 1),
+ 258: ("BitsPerSample", SHORT, 0),
+ 259: (
+ "Compression",
+ SHORT,
+ 1,
+ {
+ "Uncompressed": 1,
+ "CCITT 1d": 2,
+ "Group 3 Fax": 3,
+ "Group 4 Fax": 4,
+ "LZW": 5,
+ "JPEG": 6,
+ "PackBits": 32773,
+ },
+ ),
+ 262: (
+ "PhotometricInterpretation",
+ SHORT,
+ 1,
+ {
+ "WhiteIsZero": 0,
+ "BlackIsZero": 1,
+ "RGB": 2,
+ "RGB Palette": 3,
+ "Transparency Mask": 4,
+ "CMYK": 5,
+ "YCbCr": 6,
+ "CieLAB": 8,
+ "CFA": 32803, # TIFF/EP, Adobe DNG
+ "LinearRaw": 32892, # Adobe DNG
+ },
+ ),
+ 263: ("Threshholding", SHORT, 1),
+ 264: ("CellWidth", SHORT, 1),
+ 265: ("CellLength", SHORT, 1),
+ 266: ("FillOrder", SHORT, 1),
+ 269: ("DocumentName", ASCII, 1),
+ 270: ("ImageDescription", ASCII, 1),
+ 271: ("Make", ASCII, 1),
+ 272: ("Model", ASCII, 1),
+ 273: ("StripOffsets", LONG, 0),
+ 274: ("Orientation", SHORT, 1),
+ 277: ("SamplesPerPixel", SHORT, 1),
+ 278: ("RowsPerStrip", LONG, 1),
+ 279: ("StripByteCounts", LONG, 0),
+ 280: ("MinSampleValue", SHORT, 0),
+ 281: ("MaxSampleValue", SHORT, 0),
+ 282: ("XResolution", RATIONAL, 1),
+ 283: ("YResolution", RATIONAL, 1),
+ 284: ("PlanarConfiguration", SHORT, 1, {"Contiguous": 1, "Separate": 2}),
+ 285: ("PageName", ASCII, 1),
+ 286: ("XPosition", RATIONAL, 1),
+ 287: ("YPosition", RATIONAL, 1),
+ 288: ("FreeOffsets", LONG, 1),
+ 289: ("FreeByteCounts", LONG, 1),
+ 290: ("GrayResponseUnit", SHORT, 1),
+ 291: ("GrayResponseCurve", SHORT, 0),
+ 292: ("T4Options", LONG, 1),
+ 293: ("T6Options", LONG, 1),
+ 296: ("ResolutionUnit", SHORT, 1, {"none": 1, "inch": 2, "cm": 3}),
+ 297: ("PageNumber", SHORT, 2),
+ 301: ("TransferFunction", SHORT, 0),
+ 305: ("Software", ASCII, 1),
+ 306: ("DateTime", ASCII, 1),
+ 315: ("Artist", ASCII, 1),
+ 316: ("HostComputer", ASCII, 1),
+ 317: ("Predictor", SHORT, 1, {"none": 1, "Horizontal Differencing": 2}),
+ 318: ("WhitePoint", RATIONAL, 2),
+ 319: ("PrimaryChromaticities", RATIONAL, 6),
+ 320: ("ColorMap", SHORT, 0),
+ 321: ("HalftoneHints", SHORT, 2),
+ 322: ("TileWidth", LONG, 1),
+ 323: ("TileLength", LONG, 1),
+ 324: ("TileOffsets", LONG, 0),
+ 325: ("TileByteCounts", LONG, 0),
+ 330: ("SubIFDs", LONG, 0),
+ 332: ("InkSet", SHORT, 1),
+ 333: ("InkNames", ASCII, 1),
+ 334: ("NumberOfInks", SHORT, 1),
+ 336: ("DotRange", SHORT, 0),
+ 337: ("TargetPrinter", ASCII, 1),
+ 338: ("ExtraSamples", SHORT, 0),
+ 339: ("SampleFormat", SHORT, 0),
+ 340: ("SMinSampleValue", DOUBLE, 0),
+ 341: ("SMaxSampleValue", DOUBLE, 0),
+ 342: ("TransferRange", SHORT, 6),
+ 347: ("JPEGTables", UNDEFINED, 1),
+ # obsolete JPEG tags
+ 512: ("JPEGProc", SHORT, 1),
+ 513: ("JPEGInterchangeFormat", LONG, 1),
+ 514: ("JPEGInterchangeFormatLength", LONG, 1),
+ 515: ("JPEGRestartInterval", SHORT, 1),
+ 517: ("JPEGLosslessPredictors", SHORT, 0),
+ 518: ("JPEGPointTransforms", SHORT, 0),
+ 519: ("JPEGQTables", LONG, 0),
+ 520: ("JPEGDCTables", LONG, 0),
+ 521: ("JPEGACTables", LONG, 0),
+ 529: ("YCbCrCoefficients", RATIONAL, 3),
+ 530: ("YCbCrSubSampling", SHORT, 2),
+ 531: ("YCbCrPositioning", SHORT, 1),
+ 532: ("ReferenceBlackWhite", RATIONAL, 6),
+ 700: ("XMP", BYTE, 0),
+ 33432: ("Copyright", ASCII, 1),
+ 33723: ("IptcNaaInfo", UNDEFINED, 1),
+ 34377: ("PhotoshopInfo", BYTE, 0),
+ # FIXME add more tags here
+ 34665: ("ExifIFD", LONG, 1),
+ 34675: ("ICCProfile", UNDEFINED, 1),
+ 34853: ("GPSInfoIFD", LONG, 1),
+ 36864: ("ExifVersion", UNDEFINED, 1),
+ 37724: ("ImageSourceData", UNDEFINED, 1),
+ 40965: ("InteroperabilityIFD", LONG, 1),
+ 41730: ("CFAPattern", UNDEFINED, 1),
+ # MPInfo
+ 45056: ("MPFVersion", UNDEFINED, 1),
+ 45057: ("NumberOfImages", LONG, 1),
+ 45058: ("MPEntry", UNDEFINED, 1),
+ 45059: ("ImageUIDList", UNDEFINED, 0), # UNDONE, check
+ 45060: ("TotalFrames", LONG, 1),
+ 45313: ("MPIndividualNum", LONG, 1),
+ 45569: ("PanOrientation", LONG, 1),
+ 45570: ("PanOverlap_H", RATIONAL, 1),
+ 45571: ("PanOverlap_V", RATIONAL, 1),
+ 45572: ("BaseViewpointNum", LONG, 1),
+ 45573: ("ConvergenceAngle", SIGNED_RATIONAL, 1),
+ 45574: ("BaselineLength", RATIONAL, 1),
+ 45575: ("VerticalDivergence", SIGNED_RATIONAL, 1),
+ 45576: ("AxisDistance_X", SIGNED_RATIONAL, 1),
+ 45577: ("AxisDistance_Y", SIGNED_RATIONAL, 1),
+ 45578: ("AxisDistance_Z", SIGNED_RATIONAL, 1),
+ 45579: ("YawAngle", SIGNED_RATIONAL, 1),
+ 45580: ("PitchAngle", SIGNED_RATIONAL, 1),
+ 45581: ("RollAngle", SIGNED_RATIONAL, 1),
+ 40960: ("FlashPixVersion", UNDEFINED, 1),
+ 50741: ("MakerNoteSafety", SHORT, 1, {"Unsafe": 0, "Safe": 1}),
+ 50780: ("BestQualityScale", RATIONAL, 1),
+ 50838: ("ImageJMetaDataByteCounts", LONG, 0), # Can be more than one
+ 50839: ("ImageJMetaData", UNDEFINED, 1), # see Issue #2006
+}
+TAGS_V2_GROUPS = {
+ # ExifIFD
+ 34665: {
+ 36864: ("ExifVersion", UNDEFINED, 1),
+ 40960: ("FlashPixVersion", UNDEFINED, 1),
+ 40965: ("InteroperabilityIFD", LONG, 1),
+ 41730: ("CFAPattern", UNDEFINED, 1),
+ },
+ # GPSInfoIFD
+ 34853: {
+ 0: ("GPSVersionID", BYTE, 4),
+ 1: ("GPSLatitudeRef", ASCII, 2),
+ 2: ("GPSLatitude", RATIONAL, 3),
+ 3: ("GPSLongitudeRef", ASCII, 2),
+ 4: ("GPSLongitude", RATIONAL, 3),
+ 5: ("GPSAltitudeRef", BYTE, 1),
+ 6: ("GPSAltitude", RATIONAL, 1),
+ 7: ("GPSTimeStamp", RATIONAL, 3),
+ 8: ("GPSSatellites", ASCII, 0),
+ 9: ("GPSStatus", ASCII, 2),
+ 10: ("GPSMeasureMode", ASCII, 2),
+ 11: ("GPSDOP", RATIONAL, 1),
+ 12: ("GPSSpeedRef", ASCII, 2),
+ 13: ("GPSSpeed", RATIONAL, 1),
+ 14: ("GPSTrackRef", ASCII, 2),
+ 15: ("GPSTrack", RATIONAL, 1),
+ 16: ("GPSImgDirectionRef", ASCII, 2),
+ 17: ("GPSImgDirection", RATIONAL, 1),
+ 18: ("GPSMapDatum", ASCII, 0),
+ 19: ("GPSDestLatitudeRef", ASCII, 2),
+ 20: ("GPSDestLatitude", RATIONAL, 3),
+ 21: ("GPSDestLongitudeRef", ASCII, 2),
+ 22: ("GPSDestLongitude", RATIONAL, 3),
+ 23: ("GPSDestBearingRef", ASCII, 2),
+ 24: ("GPSDestBearing", RATIONAL, 1),
+ 25: ("GPSDestDistanceRef", ASCII, 2),
+ 26: ("GPSDestDistance", RATIONAL, 1),
+ 27: ("GPSProcessingMethod", UNDEFINED, 0),
+ 28: ("GPSAreaInformation", UNDEFINED, 0),
+ 29: ("GPSDateStamp", ASCII, 11),
+ 30: ("GPSDifferential", SHORT, 1),
+ },
+ # InteroperabilityIFD
+ 40965: {1: ("InteropIndex", ASCII, 1), 2: ("InteropVersion", UNDEFINED, 1)},
+}
+
+# Legacy Tags structure
+# these tags aren't included above, but were in the previous versions
+TAGS = {
+ 347: "JPEGTables",
+ 700: "XMP",
+ # Additional Exif Info
+ 32932: "Wang Annotation",
+ 33434: "ExposureTime",
+ 33437: "FNumber",
+ 33445: "MD FileTag",
+ 33446: "MD ScalePixel",
+ 33447: "MD ColorTable",
+ 33448: "MD LabName",
+ 33449: "MD SampleInfo",
+ 33450: "MD PrepDate",
+ 33451: "MD PrepTime",
+ 33452: "MD FileUnits",
+ 33550: "ModelPixelScaleTag",
+ 33723: "IptcNaaInfo",
+ 33918: "INGR Packet Data Tag",
+ 33919: "INGR Flag Registers",
+ 33920: "IrasB Transformation Matrix",
+ 33922: "ModelTiepointTag",
+ 34264: "ModelTransformationTag",
+ 34377: "PhotoshopInfo",
+ 34735: "GeoKeyDirectoryTag",
+ 34736: "GeoDoubleParamsTag",
+ 34737: "GeoAsciiParamsTag",
+ 34850: "ExposureProgram",
+ 34852: "SpectralSensitivity",
+ 34855: "ISOSpeedRatings",
+ 34856: "OECF",
+ 34864: "SensitivityType",
+ 34865: "StandardOutputSensitivity",
+ 34866: "RecommendedExposureIndex",
+ 34867: "ISOSpeed",
+ 34868: "ISOSpeedLatitudeyyy",
+ 34869: "ISOSpeedLatitudezzz",
+ 34908: "HylaFAX FaxRecvParams",
+ 34909: "HylaFAX FaxSubAddress",
+ 34910: "HylaFAX FaxRecvTime",
+ 36864: "ExifVersion",
+ 36867: "DateTimeOriginal",
+ 36868: "DateTimeDigitized",
+ 37121: "ComponentsConfiguration",
+ 37122: "CompressedBitsPerPixel",
+ 37724: "ImageSourceData",
+ 37377: "ShutterSpeedValue",
+ 37378: "ApertureValue",
+ 37379: "BrightnessValue",
+ 37380: "ExposureBiasValue",
+ 37381: "MaxApertureValue",
+ 37382: "SubjectDistance",
+ 37383: "MeteringMode",
+ 37384: "LightSource",
+ 37385: "Flash",
+ 37386: "FocalLength",
+ 37396: "SubjectArea",
+ 37500: "MakerNote",
+ 37510: "UserComment",
+ 37520: "SubSec",
+ 37521: "SubSecTimeOriginal",
+ 37522: "SubsecTimeDigitized",
+ 40960: "FlashPixVersion",
+ 40961: "ColorSpace",
+ 40962: "PixelXDimension",
+ 40963: "PixelYDimension",
+ 40964: "RelatedSoundFile",
+ 40965: "InteroperabilityIFD",
+ 41483: "FlashEnergy",
+ 41484: "SpatialFrequencyResponse",
+ 41486: "FocalPlaneXResolution",
+ 41487: "FocalPlaneYResolution",
+ 41488: "FocalPlaneResolutionUnit",
+ 41492: "SubjectLocation",
+ 41493: "ExposureIndex",
+ 41495: "SensingMethod",
+ 41728: "FileSource",
+ 41729: "SceneType",
+ 41730: "CFAPattern",
+ 41985: "CustomRendered",
+ 41986: "ExposureMode",
+ 41987: "WhiteBalance",
+ 41988: "DigitalZoomRatio",
+ 41989: "FocalLengthIn35mmFilm",
+ 41990: "SceneCaptureType",
+ 41991: "GainControl",
+ 41992: "Contrast",
+ 41993: "Saturation",
+ 41994: "Sharpness",
+ 41995: "DeviceSettingDescription",
+ 41996: "SubjectDistanceRange",
+ 42016: "ImageUniqueID",
+ 42032: "CameraOwnerName",
+ 42033: "BodySerialNumber",
+ 42034: "LensSpecification",
+ 42035: "LensMake",
+ 42036: "LensModel",
+ 42037: "LensSerialNumber",
+ 42112: "GDAL_METADATA",
+ 42113: "GDAL_NODATA",
+ 42240: "Gamma",
+ 50215: "Oce Scanjob Description",
+ 50216: "Oce Application Selector",
+ 50217: "Oce Identification Number",
+ 50218: "Oce ImageLogic Characteristics",
+ # Adobe DNG
+ 50706: "DNGVersion",
+ 50707: "DNGBackwardVersion",
+ 50708: "UniqueCameraModel",
+ 50709: "LocalizedCameraModel",
+ 50710: "CFAPlaneColor",
+ 50711: "CFALayout",
+ 50712: "LinearizationTable",
+ 50713: "BlackLevelRepeatDim",
+ 50714: "BlackLevel",
+ 50715: "BlackLevelDeltaH",
+ 50716: "BlackLevelDeltaV",
+ 50717: "WhiteLevel",
+ 50718: "DefaultScale",
+ 50719: "DefaultCropOrigin",
+ 50720: "DefaultCropSize",
+ 50721: "ColorMatrix1",
+ 50722: "ColorMatrix2",
+ 50723: "CameraCalibration1",
+ 50724: "CameraCalibration2",
+ 50725: "ReductionMatrix1",
+ 50726: "ReductionMatrix2",
+ 50727: "AnalogBalance",
+ 50728: "AsShotNeutral",
+ 50729: "AsShotWhiteXY",
+ 50730: "BaselineExposure",
+ 50731: "BaselineNoise",
+ 50732: "BaselineSharpness",
+ 50733: "BayerGreenSplit",
+ 50734: "LinearResponseLimit",
+ 50735: "CameraSerialNumber",
+ 50736: "LensInfo",
+ 50737: "ChromaBlurRadius",
+ 50738: "AntiAliasStrength",
+ 50740: "DNGPrivateData",
+ 50778: "CalibrationIlluminant1",
+ 50779: "CalibrationIlluminant2",
+ 50784: "Alias Layer Metadata",
+}
+
+
+def _populate():
+ for k, v in TAGS_V2.items():
+ # Populate legacy structure.
+ TAGS[k] = v[0]
+ if len(v) == 4:
+ for sk, sv in v[3].items():
+ TAGS[(k, sv)] = sk
+
+ TAGS_V2[k] = TagInfo(k, *v)
+
+ for tags in TAGS_V2_GROUPS.values():
+ for k, v in tags.items():
+ tags[k] = TagInfo(k, *v)
+
+
+_populate()
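+
+# Example (illustrative): after _populate(), TAGS_V2 entries are TagInfo
+# namedtuples, so a lookup resolves to a full record.
+#
+#   >>> lookup(257)
+#   TagInfo(value=257, name='ImageLength', type=4, length=1, enum={})
+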
+##
+# Map type numbers to type names -- defined in ImageFileDirectory.
+
+TYPES = {}
+
+#
+# These tags are handled by default in libtiff, without
+# adding to the custom dictionary. From tif_dir.c, searching for
+# case TIFFTAG in the _TIFFVSetField function:
+# Line: item.
+# 148: case TIFFTAG_SUBFILETYPE:
+# 151: case TIFFTAG_IMAGEWIDTH:
+# 154: case TIFFTAG_IMAGELENGTH:
+# 157: case TIFFTAG_BITSPERSAMPLE:
+# 181: case TIFFTAG_COMPRESSION:
+# 202: case TIFFTAG_PHOTOMETRIC:
+# 205: case TIFFTAG_THRESHHOLDING:
+# 208: case TIFFTAG_FILLORDER:
+# 214: case TIFFTAG_ORIENTATION:
+# 221: case TIFFTAG_SAMPLESPERPIXEL:
+# 228: case TIFFTAG_ROWSPERSTRIP:
+# 238: case TIFFTAG_MINSAMPLEVALUE:
+# 241: case TIFFTAG_MAXSAMPLEVALUE:
+# 244: case TIFFTAG_SMINSAMPLEVALUE:
+# 247: case TIFFTAG_SMAXSAMPLEVALUE:
+# 250: case TIFFTAG_XRESOLUTION:
+# 256: case TIFFTAG_YRESOLUTION:
+# 262: case TIFFTAG_PLANARCONFIG:
+# 268: case TIFFTAG_XPOSITION:
+# 271: case TIFFTAG_YPOSITION:
+# 274: case TIFFTAG_RESOLUTIONUNIT:
+# 280: case TIFFTAG_PAGENUMBER:
+# 284: case TIFFTAG_HALFTONEHINTS:
+# 288: case TIFFTAG_COLORMAP:
+# 294: case TIFFTAG_EXTRASAMPLES:
+# 298: case TIFFTAG_MATTEING:
+# 305: case TIFFTAG_TILEWIDTH:
+# 316: case TIFFTAG_TILELENGTH:
+# 327: case TIFFTAG_TILEDEPTH:
+# 333: case TIFFTAG_DATATYPE:
+# 344: case TIFFTAG_SAMPLEFORMAT:
+# 361: case TIFFTAG_IMAGEDEPTH:
+# 364: case TIFFTAG_SUBIFD:
+# 376: case TIFFTAG_YCBCRPOSITIONING:
+# 379: case TIFFTAG_YCBCRSUBSAMPLING:
+# 383: case TIFFTAG_TRANSFERFUNCTION:
+# 389: case TIFFTAG_REFERENCEBLACKWHITE:
+# 393: case TIFFTAG_INKNAMES:
+
+# Following pseudo-tags are also handled by default in libtiff:
+# TIFFTAG_JPEGQUALITY 65537
+
+# some of these are not in our TAGS_V2 dict and were included from tiff.h
+
+# This list also exists in encode.c
+LIBTIFF_CORE = {
+ 255,
+ 256,
+ 257,
+ 258,
+ 259,
+ 262,
+ 263,
+ 266,
+ 274,
+ 277,
+ 278,
+ 280,
+ 281,
+ 340,
+ 341,
+ 282,
+ 283,
+ 284,
+ 286,
+ 287,
+ 296,
+ 297,
+ 321,
+ 320,
+ 338,
+ 32995,
+ 322,
+ 323,
+ 32998,
+ 32996,
+ 339,
+ 32997,
+ 330,
+ 531,
+ 530,
+ 301,
+ 532,
+ 333,
+ # as above
+ 269, # this has been in our tests forever, and works
+ 65537,
+}
+
+LIBTIFF_CORE.remove(255) # We don't have support for subfiletypes
+LIBTIFF_CORE.remove(322) # We don't have support for writing tiled images with libtiff
+LIBTIFF_CORE.remove(323) # Tiled images
+LIBTIFF_CORE.remove(333) # Ink Names either
+
+# Note to advanced users: There may be combinations of these
+# parameters and values that when added properly, will work and
+# produce valid tiff images that may work in your application.
+# It is safe to add and remove tags from this set from Pillow's point
+# of view so long as you test against libtiff.
diff --git a/Lib/site-packages/PIL/WalImageFile.py b/Lib/site-packages/PIL/WalImageFile.py
new file mode 100644
index 0000000..c5bf3e0
--- /dev/null
+++ b/Lib/site-packages/PIL/WalImageFile.py
@@ -0,0 +1,124 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# WAL file handling
+#
+# History:
+# 2003-04-23 fl created
+#
+# Copyright (c) 2003 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+"""
+This reader is based on the specification available from:
+https://www.flipcode.com/archives/Quake_2_BSP_File_Format.shtml
+and has been tested with a few sample files found using Google.
+
+.. note::
+ This format cannot be automatically recognized, so the reader
+ is not registered for use with :py:func:`PIL.Image.open()`.
+ To open a WAL file, use the :py:func:`PIL.WalImageFile.open()` function instead.
+"""
+from __future__ import annotations
+
+from . import Image, ImageFile
+from ._binary import i32le as i32
+
+
+class WalImageFile(ImageFile.ImageFile):
+ format = "WAL"
+ format_description = "Quake2 Texture"
+
+ def _open(self):
+ self._mode = "P"
+
+ # read header fields
+ header = self.fp.read(32 + 24 + 32 + 12)
+ self._size = i32(header, 32), i32(header, 36)
+ Image._decompression_bomb_check(self.size)
+
+ # load pixel data
+ offset = i32(header, 40)
+ self.fp.seek(offset)
+
+ # strings are null-terminated
+ self.info["name"] = header[:32].split(b"\0", 1)[0]
+ next_name = header[56 : 56 + 32].split(b"\0", 1)[0]
+ if next_name:
+ self.info["next_name"] = next_name
+
+ def load(self):
+ if not self.im:
+ self.im = Image.core.new(self.mode, self.size)
+ self.frombytes(self.fp.read(self.size[0] * self.size[1]))
+ self.putpalette(quake2palette)
+ return Image.Image.load(self)
+
+
+def open(filename):
+ """
+ Load texture from a Quake2 WAL texture file.
+
+ By default, a Quake2 standard palette is attached to the texture.
+ To override the palette, use the :py:func:`PIL.Image.Image.putpalette()` method.
+
+ :param filename: WAL file name, or an opened file handle.
+ :returns: An image instance.
+ """
+ return WalImageFile(filename)
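+
+
+# Usage sketch (illustrative): WAL files have no magic bytes, so open the
+# format explicitly; "arrow0.wal" is a hypothetical file name.
+#
+#   from PIL import WalImageFile
+#   im = WalImageFile.open("arrow0.wal")
+#   im.save("arrow0.png")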
+
+
+quake2palette = (
+ # default palette taken from piffo 0.93 by Hans Häggström
+ b"\x01\x01\x01\x0b\x0b\x0b\x12\x12\x12\x17\x17\x17\x1b\x1b\x1b\x1e"
+ b"\x1e\x1e\x22\x22\x22\x26\x26\x26\x29\x29\x29\x2c\x2c\x2c\x2f\x2f"
+ b"\x2f\x32\x32\x32\x35\x35\x35\x37\x37\x37\x3a\x3a\x3a\x3c\x3c\x3c"
+ b"\x24\x1e\x13\x22\x1c\x12\x20\x1b\x12\x1f\x1a\x10\x1d\x19\x10\x1b"
+ b"\x17\x0f\x1a\x16\x0f\x18\x14\x0d\x17\x13\x0d\x16\x12\x0d\x14\x10"
+ b"\x0b\x13\x0f\x0b\x10\x0d\x0a\x0f\x0b\x0a\x0d\x0b\x07\x0b\x0a\x07"
+ b"\x23\x23\x26\x22\x22\x25\x22\x20\x23\x21\x1f\x22\x20\x1e\x20\x1f"
+ b"\x1d\x1e\x1d\x1b\x1c\x1b\x1a\x1a\x1a\x19\x19\x18\x17\x17\x17\x16"
+ b"\x16\x14\x14\x14\x13\x13\x13\x10\x10\x10\x0f\x0f\x0f\x0d\x0d\x0d"
+ b"\x2d\x28\x20\x29\x24\x1c\x27\x22\x1a\x25\x1f\x17\x38\x2e\x1e\x31"
+ b"\x29\x1a\x2c\x25\x17\x26\x20\x14\x3c\x30\x14\x37\x2c\x13\x33\x28"
+ b"\x12\x2d\x24\x10\x28\x1f\x0f\x22\x1a\x0b\x1b\x14\x0a\x13\x0f\x07"
+ b"\x31\x1a\x16\x30\x17\x13\x2e\x16\x10\x2c\x14\x0d\x2a\x12\x0b\x27"
+ b"\x0f\x0a\x25\x0f\x07\x21\x0d\x01\x1e\x0b\x01\x1c\x0b\x01\x1a\x0b"
+ b"\x01\x18\x0a\x01\x16\x0a\x01\x13\x0a\x01\x10\x07\x01\x0d\x07\x01"
+ b"\x29\x23\x1e\x27\x21\x1c\x26\x20\x1b\x25\x1f\x1a\x23\x1d\x19\x21"
+ b"\x1c\x18\x20\x1b\x17\x1e\x19\x16\x1c\x18\x14\x1b\x17\x13\x19\x14"
+ b"\x10\x17\x13\x0f\x14\x10\x0d\x12\x0f\x0b\x0f\x0b\x0a\x0b\x0a\x07"
+ b"\x26\x1a\x0f\x23\x19\x0f\x20\x17\x0f\x1c\x16\x0f\x19\x13\x0d\x14"
+ b"\x10\x0b\x10\x0d\x0a\x0b\x0a\x07\x33\x22\x1f\x35\x29\x26\x37\x2f"
+ b"\x2d\x39\x35\x34\x37\x39\x3a\x33\x37\x39\x30\x34\x36\x2b\x31\x34"
+ b"\x27\x2e\x31\x22\x2b\x2f\x1d\x28\x2c\x17\x25\x2a\x0f\x20\x26\x0d"
+ b"\x1e\x25\x0b\x1c\x22\x0a\x1b\x20\x07\x19\x1e\x07\x17\x1b\x07\x14"
+ b"\x18\x01\x12\x16\x01\x0f\x12\x01\x0b\x0d\x01\x07\x0a\x01\x01\x01"
+ b"\x2c\x21\x21\x2a\x1f\x1f\x29\x1d\x1d\x27\x1c\x1c\x26\x1a\x1a\x24"
+ b"\x18\x18\x22\x17\x17\x21\x16\x16\x1e\x13\x13\x1b\x12\x12\x18\x10"
+ b"\x10\x16\x0d\x0d\x12\x0b\x0b\x0d\x0a\x0a\x0a\x07\x07\x01\x01\x01"
+ b"\x2e\x30\x29\x2d\x2e\x27\x2b\x2c\x26\x2a\x2a\x24\x28\x29\x23\x27"
+ b"\x27\x21\x26\x26\x1f\x24\x24\x1d\x22\x22\x1c\x1f\x1f\x1a\x1c\x1c"
+ b"\x18\x19\x19\x16\x17\x17\x13\x13\x13\x10\x0f\x0f\x0d\x0b\x0b\x0a"
+ b"\x30\x1e\x1b\x2d\x1c\x19\x2c\x1a\x17\x2a\x19\x14\x28\x17\x13\x26"
+ b"\x16\x10\x24\x13\x0f\x21\x12\x0d\x1f\x10\x0b\x1c\x0f\x0a\x19\x0d"
+ b"\x0a\x16\x0b\x07\x12\x0a\x07\x0f\x07\x01\x0a\x01\x01\x01\x01\x01"
+ b"\x28\x29\x38\x26\x27\x36\x25\x26\x34\x24\x24\x31\x22\x22\x2f\x20"
+ b"\x21\x2d\x1e\x1f\x2a\x1d\x1d\x27\x1b\x1b\x25\x19\x19\x21\x17\x17"
+ b"\x1e\x14\x14\x1b\x13\x12\x17\x10\x0f\x13\x0d\x0b\x0f\x0a\x07\x07"
+ b"\x2f\x32\x29\x2d\x30\x26\x2b\x2e\x24\x29\x2c\x21\x27\x2a\x1e\x25"
+ b"\x28\x1c\x23\x26\x1a\x21\x25\x18\x1e\x22\x14\x1b\x1f\x10\x19\x1c"
+ b"\x0d\x17\x1a\x0a\x13\x17\x07\x10\x13\x01\x0d\x0f\x01\x0a\x0b\x01"
+ b"\x01\x3f\x01\x13\x3c\x0b\x1b\x39\x10\x20\x35\x14\x23\x31\x17\x23"
+ b"\x2d\x18\x23\x29\x18\x3f\x3f\x3f\x3f\x3f\x39\x3f\x3f\x31\x3f\x3f"
+ b"\x2a\x3f\x3f\x20\x3f\x3f\x14\x3f\x3c\x12\x3f\x39\x0f\x3f\x35\x0b"
+ b"\x3f\x32\x07\x3f\x2d\x01\x3d\x2a\x01\x3b\x26\x01\x39\x21\x01\x37"
+ b"\x1d\x01\x34\x1a\x01\x32\x16\x01\x2f\x12\x01\x2d\x0f\x01\x2a\x0b"
+ b"\x01\x27\x07\x01\x23\x01\x01\x1d\x01\x01\x17\x01\x01\x10\x01\x01"
+ b"\x3d\x01\x01\x19\x19\x3f\x3f\x01\x01\x01\x01\x3f\x16\x16\x13\x10"
+ b"\x10\x0f\x0d\x0d\x0b\x3c\x2e\x2a\x36\x27\x20\x30\x21\x18\x29\x1b"
+ b"\x10\x3c\x39\x37\x37\x32\x2f\x31\x2c\x28\x2b\x26\x21\x30\x22\x20"
+)
diff --git a/Lib/site-packages/PIL/WebPImagePlugin.py b/Lib/site-packages/PIL/WebPImagePlugin.py
new file mode 100644
index 0000000..5955620
--- /dev/null
+++ b/Lib/site-packages/PIL/WebPImagePlugin.py
@@ -0,0 +1,366 @@
+from __future__ import annotations
+
+from io import BytesIO
+
+from . import Image, ImageFile
+
+try:
+ from . import _webp
+
+ SUPPORTED = True
+except ImportError:
+ SUPPORTED = False
+
+
+_VALID_WEBP_MODES = {"RGBX": True, "RGBA": True, "RGB": True}
+
+_VALID_WEBP_LEGACY_MODES = {"RGB": True, "RGBA": True}
+
+_VP8_MODES_BY_IDENTIFIER = {
+ b"VP8 ": "RGB",
+ b"VP8X": "RGBA",
+ b"VP8L": "RGBA", # lossless
+}
+
+
+def _accept(prefix):
+ is_riff_file_format = prefix[:4] == b"RIFF"
+ is_webp_file = prefix[8:12] == b"WEBP"
+ is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER
+
+ if is_riff_file_format and is_webp_file and is_valid_vp8_mode:
+ if not SUPPORTED:
+ return (
+ "image file could not be identified because WEBP support not installed"
+ )
+ return True
+
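+# For reference, the 16-byte prefix _accept() inspects is laid out as:
+#   bytes 0-3    b"RIFF"  (RIFF container magic)
+#   bytes 4-7    little-endian chunk size (not validated here)
+#   bytes 8-11   b"WEBP"  (RIFF form type)
+#   bytes 12-15  b"VP8 ", b"VP8L" or b"VP8X" (lossy, lossless, extended)
+# As an illustrative check, _accept(b"RIFF\x00\x00\x00\x00WEBPVP8 ") is
+# truthy, while _accept(b"RIFF\x00\x00\x00\x00WAVEfmt ") is not.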
+
+class WebPImageFile(ImageFile.ImageFile):
+ format = "WEBP"
+ format_description = "WebP image"
+ __loaded = 0
+ __logical_frame = 0
+
+ def _open(self):
+ if not _webp.HAVE_WEBPANIM:
+ # Legacy mode
+ data, width, height, self._mode, icc_profile, exif = _webp.WebPDecode(
+ self.fp.read()
+ )
+ if icc_profile:
+ self.info["icc_profile"] = icc_profile
+ if exif:
+ self.info["exif"] = exif
+ self._size = width, height
+ self.fp = BytesIO(data)
+ self.tile = [("raw", (0, 0) + self.size, 0, self.mode)]
+ self.n_frames = 1
+ self.is_animated = False
+ return
+
+ # Use the newer AnimDecoder API to parse the (possibly) animated file,
+ # and access muxed chunks like ICC/EXIF/XMP.
+ self._decoder = _webp.WebPAnimDecoder(self.fp.read())
+
+ # Get info from decoder
+ width, height, loop_count, bgcolor, frame_count, mode = self._decoder.get_info()
+ self._size = width, height
+ self.info["loop"] = loop_count
+ bg_a, bg_r, bg_g, bg_b = (
+ (bgcolor >> 24) & 0xFF,
+ (bgcolor >> 16) & 0xFF,
+ (bgcolor >> 8) & 0xFF,
+ bgcolor & 0xFF,
+ )
+ self.info["background"] = (bg_r, bg_g, bg_b, bg_a)
+ self.n_frames = frame_count
+ self.is_animated = self.n_frames > 1
+ self._mode = "RGB" if mode == "RGBX" else mode
+ self.rawmode = mode
+ self.tile = []
+
+ # Attempt to read ICC / EXIF / XMP chunks from file
+ icc_profile = self._decoder.get_chunk("ICCP")
+ exif = self._decoder.get_chunk("EXIF")
+ xmp = self._decoder.get_chunk("XMP ")
+ if icc_profile:
+ self.info["icc_profile"] = icc_profile
+ if exif:
+ self.info["exif"] = exif
+ if xmp:
+ self.info["xmp"] = xmp
+
+ # Initialize seek state
+ self._reset(reset=False)
+
+ def _getexif(self):
+ if "exif" not in self.info:
+ return None
+ return self.getexif()._get_merged_dict()
+
+ def getxmp(self):
+ """
+ Returns a dictionary containing the XMP tags.
+ Requires defusedxml to be installed.
+
+ :returns: XMP tags in a dictionary.
+ """
+ return self._getxmp(self.info["xmp"]) if "xmp" in self.info else {}
+
+ def seek(self, frame):
+ if not self._seek_check(frame):
+ return
+
+ # Set logical frame to requested position
+ self.__logical_frame = frame
+
+ def _reset(self, reset=True):
+ if reset:
+ self._decoder.reset()
+ self.__physical_frame = 0
+ self.__loaded = -1
+ self.__timestamp = 0
+
+ def _get_next(self):
+ # Get next frame
+ ret = self._decoder.get_next()
+ self.__physical_frame += 1
+
+ # Check if an error occurred
+ if ret is None:
+ self._reset() # Reset just to be safe
+ self.seek(0)
+ msg = "failed to decode next frame in WebP file"
+ raise EOFError(msg)
+
+ # Compute duration
+ data, timestamp = ret
+ duration = timestamp - self.__timestamp
+ self.__timestamp = timestamp
+
+ # libwebp gives frame end, adjust to start of frame
+ timestamp -= duration
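+        # (e.g. end timestamps of 100 then 250 ms mean the second frame is
+        # reported with start == 100 and duration == 150)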
+ return data, timestamp, duration
+
+ def _seek(self, frame):
+ if self.__physical_frame == frame:
+ return # Nothing to do
+ if frame < self.__physical_frame:
+ self._reset() # Rewind to beginning
+ while self.__physical_frame < frame:
+ self._get_next() # Advance to the requested frame
+
+ def load(self):
+ if _webp.HAVE_WEBPANIM:
+ if self.__loaded != self.__logical_frame:
+ self._seek(self.__logical_frame)
+
+ # We need to load the image data for this frame
+ data, timestamp, duration = self._get_next()
+ self.info["timestamp"] = timestamp
+ self.info["duration"] = duration
+ self.__loaded = self.__logical_frame
+
+ # Set tile
+ if self.fp and self._exclusive_fp:
+ self.fp.close()
+ self.fp = BytesIO(data)
+ self.tile = [("raw", (0, 0) + self.size, 0, self.rawmode)]
+
+ return super().load()
+
+ def load_seek(self, pos):
+ pass
+
+ def tell(self):
+ if not _webp.HAVE_WEBPANIM:
+ return super().tell()
+
+ return self.__logical_frame
+
+
+def _save_all(im, fp, filename):
+ encoderinfo = im.encoderinfo.copy()
+ append_images = list(encoderinfo.get("append_images", []))
+
+ # If total frame count is 1, then save using the legacy API, which
+ # will preserve non-alpha modes
+ total = 0
+ for ims in [im] + append_images:
+ total += getattr(ims, "n_frames", 1)
+ if total == 1:
+ _save(im, fp, filename)
+ return
+
+ background = (0, 0, 0, 0)
+ if "background" in encoderinfo:
+ background = encoderinfo["background"]
+ elif "background" in im.info:
+ background = im.info["background"]
+ if isinstance(background, int):
+ # GifImagePlugin stores a global color table index in
+ # info["background"]. So it must be converted to an RGBA value
+ palette = im.getpalette()
+ if palette:
+ r, g, b = palette[background * 3 : (background + 1) * 3]
+ background = (r, g, b, 255)
+ else:
+ background = (background, background, background, 255)
+
+ duration = im.encoderinfo.get("duration", im.info.get("duration", 0))
+ loop = im.encoderinfo.get("loop", 0)
+ minimize_size = im.encoderinfo.get("minimize_size", False)
+ kmin = im.encoderinfo.get("kmin", None)
+ kmax = im.encoderinfo.get("kmax", None)
+ allow_mixed = im.encoderinfo.get("allow_mixed", False)
+ verbose = False
+ lossless = im.encoderinfo.get("lossless", False)
+ quality = im.encoderinfo.get("quality", 80)
+ method = im.encoderinfo.get("method", 0)
+ icc_profile = im.encoderinfo.get("icc_profile") or ""
+ exif = im.encoderinfo.get("exif", "")
+ if isinstance(exif, Image.Exif):
+ exif = exif.tobytes()
+ xmp = im.encoderinfo.get("xmp", "")
+ if allow_mixed:
+ lossless = False
+
+ # Sensible keyframe defaults are from gif2webp.c script
+ if kmin is None:
+ kmin = 9 if lossless else 3
+ if kmax is None:
+ kmax = 17 if lossless else 5
+
+ # Validate background color
+ if (
+ not isinstance(background, (list, tuple))
+ or len(background) != 4
+ or not all(0 <= v < 256 for v in background)
+ ):
+ msg = f"Background color is not an RGBA tuple clamped to (0-255): {background}"
+ raise OSError(msg)
+
+ # Convert to packed uint
+ bg_r, bg_g, bg_b, bg_a = background
+ background = (bg_a << 24) | (bg_r << 16) | (bg_g << 8) | (bg_b << 0)
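+    # For example, an opaque red background (255, 0, 0, 255) packs to
+    # 0xFFFF0000, matching the ARGB word order that _open() unpacks with
+    # its >> 24 / >> 16 / >> 8 shifts.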
+
+ # Setup the WebP animation encoder
+ enc = _webp.WebPAnimEncoder(
+ im.size[0],
+ im.size[1],
+ background,
+ loop,
+ minimize_size,
+ kmin,
+ kmax,
+ allow_mixed,
+ verbose,
+ )
+
+ # Add each frame
+ frame_idx = 0
+ timestamp = 0
+ cur_idx = im.tell()
+ try:
+ for ims in [im] + append_images:
+ # Get # of frames in this image
+ nfr = getattr(ims, "n_frames", 1)
+
+ for idx in range(nfr):
+ ims.seek(idx)
+ ims.load()
+
+ # Make sure image mode is supported
+ frame = ims
+ rawmode = ims.mode
+ if ims.mode not in _VALID_WEBP_MODES:
+ alpha = (
+ "A" in ims.mode
+ or "a" in ims.mode
+ or (ims.mode == "P" and "A" in ims.im.getpalettemode())
+ )
+ rawmode = "RGBA" if alpha else "RGB"
+ frame = ims.convert(rawmode)
+
+ if rawmode == "RGB":
+ # For faster conversion, use RGBX
+ rawmode = "RGBX"
+
+ # Append the frame to the animation encoder
+ enc.add(
+ frame.tobytes("raw", rawmode),
+ round(timestamp),
+ frame.size[0],
+ frame.size[1],
+ rawmode,
+ lossless,
+ quality,
+ method,
+ )
+
+ # Update timestamp and frame index
+ if isinstance(duration, (list, tuple)):
+ timestamp += duration[frame_idx]
+ else:
+ timestamp += duration
+ frame_idx += 1
+
+ finally:
+ im.seek(cur_idx)
+
+ # Force encoder to flush frames
+ enc.add(None, round(timestamp), 0, 0, "", lossless, quality, 0)
+
+ # Get the final output from the encoder
+ data = enc.assemble(icc_profile, exif, xmp)
+ if data is None:
+ msg = "cannot write file as WebP (encoder returned None)"
+ raise OSError(msg)
+
+ fp.write(data)
+
+
+def _save(im, fp, filename):
+ lossless = im.encoderinfo.get("lossless", False)
+ quality = im.encoderinfo.get("quality", 80)
+ icc_profile = im.encoderinfo.get("icc_profile") or ""
+ exif = im.encoderinfo.get("exif", b"")
+ if isinstance(exif, Image.Exif):
+ exif = exif.tobytes()
+ if exif.startswith(b"Exif\x00\x00"):
+ exif = exif[6:]
+ xmp = im.encoderinfo.get("xmp", "")
+ method = im.encoderinfo.get("method", 4)
+ exact = 1 if im.encoderinfo.get("exact") else 0
+
+ if im.mode not in _VALID_WEBP_LEGACY_MODES:
+ im = im.convert("RGBA" if im.has_transparency_data else "RGB")
+
+ data = _webp.WebPEncode(
+ im.tobytes(),
+ im.size[0],
+ im.size[1],
+ lossless,
+ float(quality),
+ im.mode,
+ icc_profile,
+ method,
+ exact,
+ exif,
+ xmp,
+ )
+ if data is None:
+ msg = "cannot write file as WebP (encoder returned None)"
+ raise OSError(msg)
+
+ fp.write(data)
+
+
+Image.register_open(WebPImageFile.format, WebPImageFile, _accept)
+if SUPPORTED:
+ Image.register_save(WebPImageFile.format, _save)
+ if _webp.HAVE_WEBPANIM:
+ Image.register_save_all(WebPImageFile.format, _save_all)
+ Image.register_extension(WebPImageFile.format, ".webp")
+ Image.register_mime(WebPImageFile.format, "image/webp")
diff --git a/Lib/site-packages/PIL/WmfImagePlugin.py b/Lib/site-packages/PIL/WmfImagePlugin.py
new file mode 100644
index 0000000..b5b8c69
--- /dev/null
+++ b/Lib/site-packages/PIL/WmfImagePlugin.py
@@ -0,0 +1,179 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# WMF stub codec
+#
+# history:
+# 1996-12-14 fl Created
+# 2004-02-22 fl Turned into a stub driver
+# 2004-02-23 fl Added EMF support
+#
+# Copyright (c) Secret Labs AB 1997-2004. All rights reserved.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+# WMF/EMF reference documentation:
+# https://winprotocoldoc.blob.core.windows.net/productionwindowsarchives/MS-WMF/[MS-WMF].pdf
+# http://wvware.sourceforge.net/caolan/index.html
+# http://wvware.sourceforge.net/caolan/ora-wmf.html
+from __future__ import annotations
+
+from . import Image, ImageFile
+from ._binary import i16le as word
+from ._binary import si16le as short
+from ._binary import si32le as _long
+
+_handler = None
+
+
+def register_handler(handler):
+ """
+ Install application-specific WMF image handler.
+
+ :param handler: Handler object.
+ """
+ global _handler
+ _handler = handler
+
+
+if hasattr(Image.core, "drawwmf"):
+ # install default handler (windows only)
+
+ class WmfHandler:
+ def open(self, im):
+ im._mode = "RGB"
+ self.bbox = im.info["wmf_bbox"]
+
+ def load(self, im):
+ im.fp.seek(0) # rewind
+ return Image.frombytes(
+ "RGB",
+ im.size,
+ Image.core.drawwmf(im.fp.read(), im.size, self.bbox),
+ "raw",
+ "BGR",
+ (im.size[0] * 3 + 3) & -4,
+ -1,
+ )
+
+ register_handler(WmfHandler())
+
+#
+# --------------------------------------------------------------------
+# Read WMF file
+
+
+def _accept(prefix):
+ return (
+ prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or prefix[:4] == b"\x01\x00\x00\x00"
+ )
+
+
+##
+# Image plugin for Windows metafiles.
+
+
+class WmfStubImageFile(ImageFile.StubImageFile):
+ format = "WMF"
+ format_description = "Windows Metafile"
+
+ def _open(self):
+ self._inch = None
+
+ # check placable header
+ s = self.fp.read(80)
+
+ if s[:6] == b"\xd7\xcd\xc6\x9a\x00\x00":
+ # placeable windows metafile
+
+ # get units per inch
+ self._inch = word(s, 14)
+
+ # get bounding box
+ x0 = short(s, 6)
+ y0 = short(s, 8)
+ x1 = short(s, 10)
+ y1 = short(s, 12)
+
+ # normalize size to 72 dots per inch
+ self.info["dpi"] = 72
+ size = (
+ (x1 - x0) * self.info["dpi"] // self._inch,
+ (y1 - y0) * self.info["dpi"] // self._inch,
+ )
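+            # Worked example: a bounding box spanning 2880 units with the
+            # common inch value of 1440 (twips per inch) gives
+            # 2880 * 72 // 1440 == 144 pixels at the normalized 72 dpi.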
+
+ self.info["wmf_bbox"] = x0, y0, x1, y1
+
+ # sanity check (standard metafile header)
+ if s[22:26] != b"\x01\x00\t\x00":
+ msg = "Unsupported WMF file format"
+ raise SyntaxError(msg)
+
+ elif s[:4] == b"\x01\x00\x00\x00" and s[40:44] == b" EMF":
+ # enhanced metafile
+
+ # get bounding box
+ x0 = _long(s, 8)
+ y0 = _long(s, 12)
+ x1 = _long(s, 16)
+ y1 = _long(s, 20)
+
+ # get frame (in 0.01 millimeter units)
+ frame = _long(s, 24), _long(s, 28), _long(s, 32), _long(s, 36)
+
+ size = x1 - x0, y1 - y0
+
+ # calculate dots per inch from bbox and frame
+            xdpi = 2540.0 * (x1 - x0) / (frame[2] - frame[0])
+ ydpi = 2540.0 * (y1 - y0) / (frame[3] - frame[1])
+
+ self.info["wmf_bbox"] = x0, y0, x1, y1
+
+ if xdpi == ydpi:
+ self.info["dpi"] = xdpi
+ else:
+ self.info["dpi"] = xdpi, ydpi
+
+ else:
+ msg = "Unsupported file format"
+ raise SyntaxError(msg)
+
+ self._mode = "RGB"
+ self._size = size
+
+ loader = self._load()
+ if loader:
+ loader.open(self)
+
+ def _load(self):
+ return _handler
+
+ def load(self, dpi=None):
+ if dpi is not None and self._inch is not None:
+ self.info["dpi"] = dpi
+ x0, y0, x1, y1 = self.info["wmf_bbox"]
+ self._size = (
+ (x1 - x0) * self.info["dpi"] // self._inch,
+ (y1 - y0) * self.info["dpi"] // self._inch,
+ )
+ return super().load()
+
+
+def _save(im, fp, filename):
+ if _handler is None or not hasattr(_handler, "save"):
+ msg = "WMF save handler not installed"
+ raise OSError(msg)
+ _handler.save(im, fp, filename)
+
+
+#
+# --------------------------------------------------------------------
+# Registry stuff
+
+
+Image.register_open(WmfStubImageFile.format, WmfStubImageFile, _accept)
+Image.register_save(WmfStubImageFile.format, _save)
+
+Image.register_extensions(WmfStubImageFile.format, [".wmf", ".emf"])
diff --git a/Lib/site-packages/PIL/XVThumbImagePlugin.py b/Lib/site-packages/PIL/XVThumbImagePlugin.py
new file mode 100644
index 0000000..47ba1c5
--- /dev/null
+++ b/Lib/site-packages/PIL/XVThumbImagePlugin.py
@@ -0,0 +1,79 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# XV Thumbnail file handler by Charles E. "Gene" Cash
+# (gcash@magicnet.net)
+#
+# see xvcolor.c and xvbrowse.c in the sources to John Bradley's XV,
+# available from ftp://ftp.cis.upenn.edu/pub/xv/
+#
+# history:
+# 98-08-15 cec created (b/w only)
+# 98-12-09 cec added color palette
+# 98-12-28 fl added to PIL (with only a few very minor modifications)
+#
+# To do:
+# FIXME: make save work (this requires quantization support)
+#
+from __future__ import annotations
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import o8
+
+_MAGIC = b"P7 332"
+
+# standard color palette for thumbnails (RGB332)
+PALETTE = b""
+for r in range(8):
+ for g in range(8):
+ for b in range(4):
+ PALETTE = PALETTE + (
+ o8((r * 255) // 7) + o8((g * 255) // 7) + o8((b * 255) // 3)
+ )
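+# The loop above emits 8 * 8 * 4 == 256 three-byte RGB entries (3 bits each
+# for red and green, 2 bits for blue, per the RGB332 layout); e.g. the final
+# entry (r=7, g=7, b=3) scales to (255, 255, 255).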
+
+
+def _accept(prefix):
+ return prefix[:6] == _MAGIC
+
+
+##
+# Image plugin for XV thumbnail images.
+
+
+class XVThumbImageFile(ImageFile.ImageFile):
+ format = "XVThumb"
+ format_description = "XV thumbnail image"
+
+ def _open(self):
+ # check magic
+ if not _accept(self.fp.read(6)):
+ msg = "not an XV thumbnail file"
+ raise SyntaxError(msg)
+
+ # Skip to beginning of next line
+ self.fp.readline()
+
+ # skip info comments
+ while True:
+ s = self.fp.readline()
+ if not s:
+ msg = "Unexpected EOF reading XV thumbnail file"
+ raise SyntaxError(msg)
+            if s[0] != 35:  # i.e. when not a comment: '#'
+ break
+
+ # parse header line (already read)
+ s = s.strip().split()
+
+ self._mode = "P"
+ self._size = int(s[0]), int(s[1])
+
+ self.palette = ImagePalette.raw("RGB", PALETTE)
+
+ self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), (self.mode, 0, 1))]
+
+
+# --------------------------------------------------------------------
+
+Image.register_open(XVThumbImageFile.format, XVThumbImageFile, _accept)
diff --git a/Lib/site-packages/PIL/XbmImagePlugin.py b/Lib/site-packages/PIL/XbmImagePlugin.py
new file mode 100644
index 0000000..566acbf
--- /dev/null
+++ b/Lib/site-packages/PIL/XbmImagePlugin.py
@@ -0,0 +1,95 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# XBM File handling
+#
+# History:
+# 1995-09-08 fl Created
+# 1996-11-01 fl Added save support
+# 1997-07-07 fl Made header parser more tolerant
+# 1997-07-22 fl Fixed yet another parser bug
+# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4)
+# 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog)
+# 2004-02-24 fl Allow some whitespace before first #define
+#
+# Copyright (c) 1997-2004 by Secret Labs AB
+# Copyright (c) 1996-1997 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import re
+
+from . import Image, ImageFile
+
+# XBM header
+xbm_head = re.compile(
+ rb"\s*#define[ \t]+.*_width[ \t]+(?P[0-9]+)[\r\n]+"
+ b"#define[ \t]+.*_height[ \t]+(?P[0-9]+)[\r\n]+"
+ b"(?P"
+ b"#define[ \t]+[^_]*_x_hot[ \t]+(?P[0-9]+)[\r\n]+"
+ b"#define[ \t]+[^_]*_y_hot[ \t]+(?P[0-9]+)[\r\n]+"
+ b")?"
+ rb"[\000-\377]*_bits\[]"
+)
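+# For illustration, the pattern above matches a header such as:
+#
+#   #define im_width 16
+#   #define im_height 16
+#   static char im_bits[] = {
+#
+# with optional *_x_hot/*_y_hot #defines captured by the "hotspot" group.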
+
+
+def _accept(prefix):
+ return prefix.lstrip()[:7] == b"#define"
+
+
+##
+# Image plugin for X11 bitmaps.
+
+
+class XbmImageFile(ImageFile.ImageFile):
+ format = "XBM"
+ format_description = "X11 Bitmap"
+
+ def _open(self):
+ m = xbm_head.match(self.fp.read(512))
+
+ if not m:
+ msg = "not a XBM file"
+ raise SyntaxError(msg)
+
+ xsize = int(m.group("width"))
+ ysize = int(m.group("height"))
+
+ if m.group("hotspot"):
+ self.info["hotspot"] = (int(m.group("xhot")), int(m.group("yhot")))
+
+ self._mode = "1"
+ self._size = xsize, ysize
+
+ self.tile = [("xbm", (0, 0) + self.size, m.end(), None)]
+
+
+def _save(im, fp, filename):
+ if im.mode != "1":
+ msg = f"cannot write mode {im.mode} as XBM"
+ raise OSError(msg)
+
+ fp.write(f"#define im_width {im.size[0]}\n".encode("ascii"))
+ fp.write(f"#define im_height {im.size[1]}\n".encode("ascii"))
+
+ hotspot = im.encoderinfo.get("hotspot")
+ if hotspot:
+ fp.write(f"#define im_x_hot {hotspot[0]}\n".encode("ascii"))
+ fp.write(f"#define im_y_hot {hotspot[1]}\n".encode("ascii"))
+
+ fp.write(b"static char im_bits[] = {\n")
+
+ ImageFile._save(im, fp, [("xbm", (0, 0) + im.size, 0, None)])
+
+ fp.write(b"};\n")
+
+
+Image.register_open(XbmImageFile.format, XbmImageFile, _accept)
+Image.register_save(XbmImageFile.format, _save)
+
+Image.register_extension(XbmImageFile.format, ".xbm")
+
+Image.register_mime(XbmImageFile.format, "image/xbm")
diff --git a/Lib/site-packages/PIL/XpmImagePlugin.py b/Lib/site-packages/PIL/XpmImagePlugin.py
new file mode 100644
index 0000000..bf73c9b
--- /dev/null
+++ b/Lib/site-packages/PIL/XpmImagePlugin.py
@@ -0,0 +1,128 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# XPM File handling
+#
+# History:
+# 1996-12-29 fl Created
+# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7)
+#
+# Copyright (c) Secret Labs AB 1997-2001.
+# Copyright (c) Fredrik Lundh 1996-2001.
+#
+# See the README file for information on usage and redistribution.
+#
+from __future__ import annotations
+
+import re
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import o8
+
+# XPM header
+xpm_head = re.compile(b'"([0-9]*) +([0-9]*) +([0-9]*) +([0-9]*)')
+
+
+def _accept(prefix):
+ return prefix[:9] == b"/* XPM */"
+
+
+##
+# Image plugin for X11 pixel maps.
+
+
+class XpmImageFile(ImageFile.ImageFile):
+ format = "XPM"
+ format_description = "X11 Pixel Map"
+
+ def _open(self):
+ if not _accept(self.fp.read(9)):
+ msg = "not an XPM file"
+ raise SyntaxError(msg)
+
+ # skip forward to next string
+ while True:
+ s = self.fp.readline()
+ if not s:
+ msg = "broken XPM file"
+ raise SyntaxError(msg)
+ m = xpm_head.match(s)
+ if m:
+ break
+
+ self._size = int(m.group(1)), int(m.group(2))
+
+ pal = int(m.group(3))
+ bpp = int(m.group(4))
+
+ if pal > 256 or bpp != 1:
+ msg = "cannot read this XPM file"
+ raise ValueError(msg)
+
+ #
+ # load palette description
+
+ palette = [b"\0\0\0"] * 256
+
+ for _ in range(pal):
+ s = self.fp.readline()
+ if s[-2:] == b"\r\n":
+ s = s[:-2]
+ elif s[-1:] in b"\r\n":
+ s = s[:-1]
+
+ c = s[1]
+ s = s[2:-2].split()
+
+ for i in range(0, len(s), 2):
+ if s[i] == b"c":
+ # process colour key
+ rgb = s[i + 1]
+ if rgb == b"None":
+ self.info["transparency"] = c
+ elif rgb[:1] == b"#":
+ # FIXME: handle colour names (see ImagePalette.py)
+ rgb = int(rgb[1:], 16)
+ palette[c] = (
+ o8((rgb >> 16) & 255) + o8((rgb >> 8) & 255) + o8(rgb & 255)
+ )
+ else:
+ # unknown colour
+ msg = "cannot read this XPM file"
+ raise ValueError(msg)
+ break
+
+ else:
+ # missing colour key
+ msg = "cannot read this XPM file"
+ raise ValueError(msg)
+
+ self._mode = "P"
+ self.palette = ImagePalette.raw("RGB", b"".join(palette))
+
+ self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), ("P", 0, 1))]
+
+ def load_read(self, bytes):
+ #
+ # load all image data in one chunk
+
+ xsize, ysize = self.size
+
+ s = [None] * ysize
+
+ for i in range(ysize):
+ s[i] = self.fp.readline()[1 : xsize + 1].ljust(xsize)
+
+ return b"".join(s)
+
+
+#
+# Registry
+
+
+Image.register_open(XpmImageFile.format, XpmImageFile, _accept)
+
+Image.register_extension(XpmImageFile.format, ".xpm")
+
+Image.register_mime(XpmImageFile.format, "image/xpm")
diff --git a/Lib/site-packages/PIL/__init__.py b/Lib/site-packages/PIL/__init__.py
new file mode 100644
index 0000000..3fcac86
--- /dev/null
+++ b/Lib/site-packages/PIL/__init__.py
@@ -0,0 +1,85 @@
+"""Pillow (Fork of the Python Imaging Library)
+
+Pillow is the friendly PIL fork by Jeffrey A. Clark (Alex) and contributors.
+ https://github.com/python-pillow/Pillow/
+
+Pillow is forked from PIL 1.1.7.
+
+PIL is the Python Imaging Library by Fredrik Lundh and contributors.
+Copyright (c) 1999 by Secret Labs AB.
+
+Use PIL.__version__ for this Pillow version.
+
+;-)
+"""
+from __future__ import annotations
+
+from . import _version
+
+# VERSION was removed in Pillow 6.0.0.
+# PILLOW_VERSION was removed in Pillow 9.0.0.
+# Use __version__ instead.
+__version__ = _version.__version__
+del _version
+
+
+_plugins = [
+ "BlpImagePlugin",
+ "BmpImagePlugin",
+ "BufrStubImagePlugin",
+ "CurImagePlugin",
+ "DcxImagePlugin",
+ "DdsImagePlugin",
+ "EpsImagePlugin",
+ "FitsImagePlugin",
+ "FliImagePlugin",
+ "FpxImagePlugin",
+ "FtexImagePlugin",
+ "GbrImagePlugin",
+ "GifImagePlugin",
+ "GribStubImagePlugin",
+ "Hdf5StubImagePlugin",
+ "IcnsImagePlugin",
+ "IcoImagePlugin",
+ "ImImagePlugin",
+ "ImtImagePlugin",
+ "IptcImagePlugin",
+ "JpegImagePlugin",
+ "Jpeg2KImagePlugin",
+ "McIdasImagePlugin",
+ "MicImagePlugin",
+ "MpegImagePlugin",
+ "MpoImagePlugin",
+ "MspImagePlugin",
+ "PalmImagePlugin",
+ "PcdImagePlugin",
+ "PcxImagePlugin",
+ "PdfImagePlugin",
+ "PixarImagePlugin",
+ "PngImagePlugin",
+ "PpmImagePlugin",
+ "PsdImagePlugin",
+ "QoiImagePlugin",
+ "SgiImagePlugin",
+ "SpiderImagePlugin",
+ "SunImagePlugin",
+ "TgaImagePlugin",
+ "TiffImagePlugin",
+ "WebPImagePlugin",
+ "WmfImagePlugin",
+ "XbmImagePlugin",
+ "XpmImagePlugin",
+ "XVThumbImagePlugin",
+]
+
+
+class UnidentifiedImageError(OSError):
+ """
+ Raised in :py:meth:`PIL.Image.open` if an image cannot be opened and identified.
+
+ If a PNG image raises this error, setting :data:`.ImageFile.LOAD_TRUNCATED_IMAGES`
+ to true may allow the image to be opened after all. The setting will ignore missing
+ data and checksum failures.
+ """
+
+ pass
diff --git a/Lib/site-packages/PIL/__main__.py b/Lib/site-packages/PIL/__main__.py
new file mode 100644
index 0000000..9437899
--- /dev/null
+++ b/Lib/site-packages/PIL/__main__.py
@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+from .features import pilinfo
+
+pilinfo()
diff --git a/Lib/site-packages/PIL/_binary.py b/Lib/site-packages/PIL/_binary.py
new file mode 100644
index 0000000..0a07e8d
--- /dev/null
+++ b/Lib/site-packages/PIL/_binary.py
@@ -0,0 +1,102 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# Binary input/output support routines.
+#
+# Copyright (c) 1997-2003 by Secret Labs AB
+# Copyright (c) 1995-2003 by Fredrik Lundh
+# Copyright (c) 2012 by Brian Crowell
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+"""Binary input/output support routines."""
+from __future__ import annotations
+
+from struct import pack, unpack_from
+
+
+def i8(c: bytes) -> int:
+ return c[0]
+
+
+def o8(i: int) -> bytes:
+ return bytes((i & 255,))
+
+
+# Input, le = little endian, be = big endian
+def i16le(c: bytes, o: int = 0) -> int:
+ """
+ Converts a 2-bytes (16 bits) string to an unsigned integer.
+
+ :param c: string containing bytes to convert
+ :param o: offset of bytes to convert in string
+ """
+ return unpack_from(" int:
+ """
+ Converts a 2-bytes (16 bits) string to a signed integer.
+
+ :param c: string containing bytes to convert
+ :param o: offset of bytes to convert in string
+ """
+ return unpack_from(" int:
+ """
+ Converts a 2-bytes (16 bits) string to a signed integer, big endian.
+
+ :param c: string containing bytes to convert
+ :param o: offset of bytes to convert in string
+ """
+ return unpack_from(">h", c, o)[0]
+
+
+def i32le(c: bytes, o: int = 0) -> int:
+ """
+ Converts a 4-bytes (32 bits) string to an unsigned integer.
+
+ :param c: string containing bytes to convert
+ :param o: offset of bytes to convert in string
+ """
+ return unpack_from(" int:
+ """
+ Converts a 4-bytes (32 bits) string to a signed integer.
+
+ :param c: string containing bytes to convert
+ :param o: offset of bytes to convert in string
+ """
+ return unpack_from(" int:
+ return unpack_from(">H", c, o)[0]
+
+
+def i32be(c: bytes, o: int = 0) -> int:
+ return unpack_from(">I", c, o)[0]
+
+
+# Output, le = little endian, be = big endian
+def o16le(i: int) -> bytes:
+ return pack(" bytes:
+ return pack(" bytes:
+ return pack(">H", i)
+
+
+def o32be(i: int) -> bytes:
+ return pack(">I", i)
diff --git a/Lib/site-packages/PIL/_deprecate.py b/Lib/site-packages/PIL/_deprecate.py
new file mode 100644
index 0000000..33a0e07
--- /dev/null
+++ b/Lib/site-packages/PIL/_deprecate.py
@@ -0,0 +1,71 @@
+from __future__ import annotations
+
+import warnings
+
+from . import __version__
+
+
+def deprecate(
+ deprecated: str,
+ when: int | None,
+ replacement: str | None = None,
+ *,
+ action: str | None = None,
+ plural: bool = False,
+) -> None:
+ """
+ Deprecations helper.
+
+ :param deprecated: Name of thing to be deprecated.
+ :param when: Pillow major version to be removed in.
+ :param replacement: Name of replacement.
+ :param action: Instead of "replacement", give a custom call to action
+ e.g. "Upgrade to new thing".
+ :param plural: if the deprecated thing is plural, needing "are" instead of "is".
+
+ Usually of the form:
+
+ "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd).
+ Use [replacement] instead."
+
+ You can leave out the replacement sentence:
+
+ "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd)"
+
+ Or with another call to action:
+
+ "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd).
+ [action]."
+ """
+
+ is_ = "are" if plural else "is"
+
+ if when is None:
+ removed = "a future version"
+ elif when <= int(__version__.split(".")[0]):
+ msg = f"{deprecated} {is_} deprecated and should be removed."
+ raise RuntimeError(msg)
+ elif when == 11:
+ removed = "Pillow 11 (2024-10-15)"
+ elif when == 12:
+ removed = "Pillow 12 (2025-10-15)"
+ else:
+ msg = f"Unknown removal version: {when}. Update {__name__}?"
+ raise ValueError(msg)
+
+ if replacement and action:
+ msg = "Use only one of 'replacement' and 'action'"
+ raise ValueError(msg)
+
+ if replacement:
+ action = f". Use {replacement} instead."
+ elif action:
+ action = f". {action.rstrip('.')}."
+ else:
+ action = ""
+
+ warnings.warn(
+ f"{deprecated} {is_} deprecated and will be removed in {removed}{action}",
+ DeprecationWarning,
+ stacklevel=3,
+ )
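+
+
+# Usage sketch (hypothetical names):
+#
+#   deprecate("old_method", 12, "new_method")
+#
+# warns: "old_method is deprecated and will be removed in Pillow 12
+# (2025-10-15). Use new_method instead."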
diff --git a/Lib/site-packages/PIL/_imaging.cp312-win_amd64.pyd b/Lib/site-packages/PIL/_imaging.cp312-win_amd64.pyd
new file mode 100644
index 0000000..fb151d5
Binary files /dev/null and b/Lib/site-packages/PIL/_imaging.cp312-win_amd64.pyd differ
diff --git a/Lib/site-packages/PIL/_imagingcms.cp312-win_amd64.pyd b/Lib/site-packages/PIL/_imagingcms.cp312-win_amd64.pyd
new file mode 100644
index 0000000..e3c0a83
Binary files /dev/null and b/Lib/site-packages/PIL/_imagingcms.cp312-win_amd64.pyd differ
diff --git a/Lib/site-packages/PIL/_imagingcms.pyi b/Lib/site-packages/PIL/_imagingcms.pyi
new file mode 100644
index 0000000..b023555
--- /dev/null
+++ b/Lib/site-packages/PIL/_imagingcms.pyi
@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+from typing import Any
+
+def __getattr__(name: str) -> Any: ...
diff --git a/Lib/site-packages/PIL/_imagingft.cp312-win_amd64.pyd b/Lib/site-packages/PIL/_imagingft.cp312-win_amd64.pyd
new file mode 100644
index 0000000..a635eca
Binary files /dev/null and b/Lib/site-packages/PIL/_imagingft.cp312-win_amd64.pyd differ
diff --git a/Lib/site-packages/PIL/_imagingft.pyi b/Lib/site-packages/PIL/_imagingft.pyi
new file mode 100644
index 0000000..b023555
--- /dev/null
+++ b/Lib/site-packages/PIL/_imagingft.pyi
@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+from typing import Any
+
+def __getattr__(name: str) -> Any: ...
diff --git a/Lib/site-packages/PIL/_imagingmath.cp312-win_amd64.pyd b/Lib/site-packages/PIL/_imagingmath.cp312-win_amd64.pyd
new file mode 100644
index 0000000..be4a867
Binary files /dev/null and b/Lib/site-packages/PIL/_imagingmath.cp312-win_amd64.pyd differ
diff --git a/Lib/site-packages/PIL/_imagingmorph.cp312-win_amd64.pyd b/Lib/site-packages/PIL/_imagingmorph.cp312-win_amd64.pyd
new file mode 100644
index 0000000..05b07e8
Binary files /dev/null and b/Lib/site-packages/PIL/_imagingmorph.cp312-win_amd64.pyd differ
diff --git a/Lib/site-packages/PIL/_imagingtk.cp312-win_amd64.pyd b/Lib/site-packages/PIL/_imagingtk.cp312-win_amd64.pyd
new file mode 100644
index 0000000..45f7187
Binary files /dev/null and b/Lib/site-packages/PIL/_imagingtk.cp312-win_amd64.pyd differ
diff --git a/Lib/site-packages/PIL/_tkinter_finder.py b/Lib/site-packages/PIL/_tkinter_finder.py
new file mode 100644
index 0000000..03a6eba
--- /dev/null
+++ b/Lib/site-packages/PIL/_tkinter_finder.py
@@ -0,0 +1,19 @@
+""" Find compiled module linking to Tcl / Tk libraries
+"""
+from __future__ import annotations
+
+import sys
+import tkinter
+from tkinter import _tkinter as tk
+
+try:
+ if hasattr(sys, "pypy_find_executable"):
+ TKINTER_LIB = tk.tklib_cffi.__file__
+ else:
+ TKINTER_LIB = tk.__file__
+except AttributeError:
+ # _tkinter may be compiled directly into Python, in which case __file__ is
+ # not available. load_tkinter_funcs will check the binary first in any case.
+ TKINTER_LIB = None
+
+tk_version = str(tkinter.TkVersion)
diff --git a/Lib/site-packages/PIL/_typing.py b/Lib/site-packages/PIL/_typing.py
new file mode 100644
index 0000000..608b2b4
--- /dev/null
+++ b/Lib/site-packages/PIL/_typing.py
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+import sys
+
+if sys.version_info >= (3, 10):
+ from typing import TypeGuard
+else:
+ try:
+ from typing_extensions import TypeGuard
+ except ImportError:
+ from typing import Any
+
+ class TypeGuard: # type: ignore[no-redef]
+ def __class_getitem__(cls, item: Any) -> type[bool]:
+ return bool
+
+
+__all__ = ["TypeGuard"]
diff --git a/Lib/site-packages/PIL/_util.py b/Lib/site-packages/PIL/_util.py
new file mode 100644
index 0000000..13f369c
--- /dev/null
+++ b/Lib/site-packages/PIL/_util.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+import os
+from pathlib import Path
+from typing import Any, NoReturn
+
+from ._typing import TypeGuard
+
+
+def is_path(f: Any) -> TypeGuard[bytes | str | Path]:
+ return isinstance(f, (bytes, str, Path))
+
+
+def is_directory(f: Any) -> TypeGuard[bytes | str | Path]:
+ """Checks if an object is a string, and that it points to a directory."""
+ return is_path(f) and os.path.isdir(f)
+
+
+class DeferredError:
+ def __init__(self, ex: BaseException):
+ self.ex = ex
+
+ def __getattr__(self, elt: str) -> NoReturn:
+ raise self.ex
+
+ @staticmethod
+ def new(ex: BaseException) -> Any:
+ """
+ Creates an object that raises the wrapped exception ``ex`` when used,
+ and casts it to :py:obj:`~typing.Any` type.
+ """
+ return DeferredError(ex)
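+
+
+# Usage sketch of the deferred-failure pattern (simplified, assumed names):
+#
+#   try:
+#       from . import _imaging as core
+#   except ImportError as ex:
+#       core = DeferredError.new(ex)  # any attribute access re-raises ex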
diff --git a/Lib/site-packages/PIL/_version.py b/Lib/site-packages/PIL/_version.py
new file mode 100644
index 0000000..1018b96
--- /dev/null
+++ b/Lib/site-packages/PIL/_version.py
@@ -0,0 +1,4 @@
+# Master version for Pillow
+from __future__ import annotations
+
+__version__ = "10.2.0"
diff --git a/Lib/site-packages/PIL/_webp.cp312-win_amd64.pyd b/Lib/site-packages/PIL/_webp.cp312-win_amd64.pyd
new file mode 100644
index 0000000..739bb47
Binary files /dev/null and b/Lib/site-packages/PIL/_webp.cp312-win_amd64.pyd differ
diff --git a/Lib/site-packages/PIL/features.py b/Lib/site-packages/PIL/features.py
new file mode 100644
index 0000000..b14d6df
--- /dev/null
+++ b/Lib/site-packages/PIL/features.py
@@ -0,0 +1,331 @@
+from __future__ import annotations
+
+import collections
+import os
+import sys
+import warnings
+
+import PIL
+
+from . import Image
+
+modules = {
+ "pil": ("PIL._imaging", "PILLOW_VERSION"),
+ "tkinter": ("PIL._tkinter_finder", "tk_version"),
+ "freetype2": ("PIL._imagingft", "freetype2_version"),
+ "littlecms2": ("PIL._imagingcms", "littlecms_version"),
+ "webp": ("PIL._webp", "webpdecoder_version"),
+}
+
+
+def check_module(feature):
+ """
+ Checks if a module is available.
+
+ :param feature: The module to check for.
+ :returns: ``True`` if available, ``False`` otherwise.
+ :raises ValueError: If the module is not defined in this version of Pillow.
+ """
+ if feature not in modules:
+ msg = f"Unknown module {feature}"
+ raise ValueError(msg)
+
+ module, ver = modules[feature]
+
+ try:
+ __import__(module)
+ return True
+ except ModuleNotFoundError:
+ return False
+ except ImportError as ex:
+ warnings.warn(str(ex))
+ return False
+
+
+def version_module(feature):
+ """
+ :param feature: The module to check for.
+ :returns:
+ The loaded version number as a string, or ``None`` if unknown or not available.
+ :raises ValueError: If the module is not defined in this version of Pillow.
+ """
+ if not check_module(feature):
+ return None
+
+ module, ver = modules[feature]
+
+ if ver is None:
+ return None
+
+ return getattr(__import__(module, fromlist=[ver]), ver)
+
+
+def get_supported_modules():
+ """
+ :returns: A list of all supported modules.
+ """
+ return [f for f in modules if check_module(f)]
+
+
+codecs = {
+ "jpg": ("jpeg", "jpeglib"),
+ "jpg_2000": ("jpeg2k", "jp2klib"),
+ "zlib": ("zip", "zlib"),
+ "libtiff": ("libtiff", "libtiff"),
+}
+
+
+def check_codec(feature):
+ """
+ Checks if a codec is available.
+
+ :param feature: The codec to check for.
+ :returns: ``True`` if available, ``False`` otherwise.
+ :raises ValueError: If the codec is not defined in this version of Pillow.
+ """
+ if feature not in codecs:
+ msg = f"Unknown codec {feature}"
+ raise ValueError(msg)
+
+ codec, lib = codecs[feature]
+
+ return codec + "_encoder" in dir(Image.core)
+
+
+def version_codec(feature):
+ """
+ :param feature: The codec to check for.
+ :returns:
+ The version number as a string, or ``None`` if not available.
+ Checked at compile time for ``jpg``, run-time otherwise.
+ :raises ValueError: If the codec is not defined in this version of Pillow.
+ """
+ if not check_codec(feature):
+ return None
+
+ codec, lib = codecs[feature]
+
+ version = getattr(Image.core, lib + "_version")
+
+ if feature == "libtiff":
+ return version.split("\n")[0].split("Version ")[1]
+
+ return version
+
+
+def get_supported_codecs():
+ """
+ :returns: A list of all supported codecs.
+ """
+ return [f for f in codecs if check_codec(f)]
+
+
+features = {
+ "webp_anim": ("PIL._webp", "HAVE_WEBPANIM", None),
+ "webp_mux": ("PIL._webp", "HAVE_WEBPMUX", None),
+ "transp_webp": ("PIL._webp", "HAVE_TRANSPARENCY", None),
+ "raqm": ("PIL._imagingft", "HAVE_RAQM", "raqm_version"),
+ "fribidi": ("PIL._imagingft", "HAVE_FRIBIDI", "fribidi_version"),
+ "harfbuzz": ("PIL._imagingft", "HAVE_HARFBUZZ", "harfbuzz_version"),
+ "libjpeg_turbo": ("PIL._imaging", "HAVE_LIBJPEGTURBO", "libjpeg_turbo_version"),
+ "libimagequant": ("PIL._imaging", "HAVE_LIBIMAGEQUANT", "imagequant_version"),
+ "xcb": ("PIL._imaging", "HAVE_XCB", None),
+}
+
+
+def check_feature(feature):
+ """
+ Checks if a feature is available.
+
+ :param feature: The feature to check for.
+ :returns: ``True`` if available, ``False`` if unavailable, ``None`` if unknown.
+ :raises ValueError: If the feature is not defined in this version of Pillow.
+ """
+ if feature not in features:
+ msg = f"Unknown feature {feature}"
+ raise ValueError(msg)
+
+ module, flag, ver = features[feature]
+
+ try:
+ imported_module = __import__(module, fromlist=["PIL"])
+ return getattr(imported_module, flag)
+ except ModuleNotFoundError:
+ return None
+ except ImportError as ex:
+ warnings.warn(str(ex))
+ return None
+
+
+def version_feature(feature):
+ """
+ :param feature: The feature to check for.
+ :returns: The version number as a string, or ``None`` if not available.
+ :raises ValueError: If the feature is not defined in this version of Pillow.
+ """
+ if not check_feature(feature):
+ return None
+
+ module, flag, ver = features[feature]
+
+ if ver is None:
+ return None
+
+ return getattr(__import__(module, fromlist=[ver]), ver)
+
+
+def get_supported_features():
+ """
+ :returns: A list of all supported features.
+ """
+ return [f for f in features if check_feature(f)]
+
+
+def check(feature):
+ """
+ :param feature: A module, codec, or feature name.
+ :returns:
+ ``True`` if the module, codec, or feature is available,
+ ``False`` or ``None`` otherwise.
+ """
+
+ if feature in modules:
+ return check_module(feature)
+ if feature in codecs:
+ return check_codec(feature)
+ if feature in features:
+ return check_feature(feature)
+ warnings.warn(f"Unknown feature '{feature}'.", stacklevel=2)
+ return False
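+
+
+# For example, check("webp") dispatches to check_module(), check("jpg") to
+# check_codec(), and check("webp_anim") to check_feature().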
+
+
+def version(feature):
+ """
+ :param feature:
+ The module, codec, or feature to check for.
+ :returns:
+ The version number as a string, or ``None`` if unknown or not available.
+ """
+ if feature in modules:
+ return version_module(feature)
+ if feature in codecs:
+ return version_codec(feature)
+ if feature in features:
+ return version_feature(feature)
+ return None
+
+
+def get_supported():
+ """
+ :returns: A list of all supported modules, features, and codecs.
+ """
+
+ ret = get_supported_modules()
+ ret.extend(get_supported_features())
+ ret.extend(get_supported_codecs())
+ return ret
+
+
+def pilinfo(out=None, supported_formats=True):
+ """
+ Prints information about this installation of Pillow.
+ This function can be called with ``python3 -m PIL``.
+
+ :param out:
+ The output stream to print to. Defaults to ``sys.stdout`` if ``None``.
+ :param supported_formats:
+ If ``True``, a list of all supported image file formats will be printed.
+ """
+
+ if out is None:
+ out = sys.stdout
+
+ Image.init()
+
+ print("-" * 68, file=out)
+ print(f"Pillow {PIL.__version__}", file=out)
+ py_version = sys.version.splitlines()
+ print(f"Python {py_version[0].strip()}", file=out)
+ for py_version in py_version[1:]:
+ print(f" {py_version.strip()}", file=out)
+ print("-" * 68, file=out)
+ print(
+ f"Python modules loaded from {os.path.dirname(Image.__file__)}",
+ file=out,
+ )
+ print(
+ f"Binary modules loaded from {os.path.dirname(Image.core.__file__)}",
+ file=out,
+ )
+ print("-" * 68, file=out)
+
+ for name, feature in [
+ ("pil", "PIL CORE"),
+ ("tkinter", "TKINTER"),
+ ("freetype2", "FREETYPE2"),
+ ("littlecms2", "LITTLECMS2"),
+ ("webp", "WEBP"),
+ ("transp_webp", "WEBP Transparency"),
+ ("webp_mux", "WEBPMUX"),
+ ("webp_anim", "WEBP Animation"),
+ ("jpg", "JPEG"),
+ ("jpg_2000", "OPENJPEG (JPEG2000)"),
+ ("zlib", "ZLIB (PNG/ZIP)"),
+ ("libtiff", "LIBTIFF"),
+ ("raqm", "RAQM (Bidirectional Text)"),
+ ("libimagequant", "LIBIMAGEQUANT (Quantization method)"),
+ ("xcb", "XCB (X protocol)"),
+ ]:
+ if check(name):
+ if name == "jpg" and check_feature("libjpeg_turbo"):
+ v = "libjpeg-turbo " + version_feature("libjpeg_turbo")
+ else:
+ v = version(name)
+ if v is not None:
+ version_static = name in ("pil", "jpg")
+ if name == "littlecms2":
+ # this check is also in src/_imagingcms.c:setup_module()
+ version_static = tuple(int(x) for x in v.split(".")) < (2, 7)
+ t = "compiled for" if version_static else "loaded"
+ if name == "raqm":
+ for f in ("fribidi", "harfbuzz"):
+ v2 = version_feature(f)
+ if v2 is not None:
+ v += f", {f} {v2}"
+ print("---", feature, "support ok,", t, v, file=out)
+ else:
+ print("---", feature, "support ok", file=out)
+ else:
+ print("***", feature, "support not installed", file=out)
+ print("-" * 68, file=out)
+
+ if supported_formats:
+ extensions = collections.defaultdict(list)
+ for ext, i in Image.EXTENSION.items():
+ extensions[i].append(ext)
+
+ for i in sorted(Image.ID):
+ line = f"{i}"
+ if i in Image.MIME:
+ line = f"{line} {Image.MIME[i]}"
+ print(line, file=out)
+
+ if i in extensions:
+ print(
+ "Extensions: {}".format(", ".join(sorted(extensions[i]))), file=out
+ )
+
+ features = []
+ if i in Image.OPEN:
+ features.append("open")
+ if i in Image.SAVE:
+ features.append("save")
+ if i in Image.SAVE_ALL:
+ features.append("save_all")
+ if i in Image.DECODERS:
+ features.append("decode")
+ if i in Image.ENCODERS:
+ features.append("encode")
+
+ print("Features: {}".format(", ".join(features)), file=out)
+ print("-" * 68, file=out)
diff --git a/Lib/site-packages/_distutils_hack/__init__.py b/Lib/site-packages/_distutils_hack/__init__.py
new file mode 100644
index 0000000..b951c2d
--- /dev/null
+++ b/Lib/site-packages/_distutils_hack/__init__.py
@@ -0,0 +1,227 @@
+# don't import any costly modules
+import sys
+import os
+
+
+is_pypy = '__pypy__' in sys.builtin_module_names
+
+
+def warn_distutils_present():
+ if 'distutils' not in sys.modules:
+ return
+ if is_pypy and sys.version_info < (3, 7):
+ # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
+ # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
+ return
+ import warnings
+
+ warnings.warn(
+ "Distutils was imported before Setuptools, but importing Setuptools "
+ "also replaces the `distutils` module in `sys.modules`. This may lead "
+ "to undesirable behaviors or errors. To avoid these issues, avoid "
+ "using distutils directly, ensure that setuptools is installed in the "
+ "traditional way (e.g. not an editable install), and/or make sure "
+ "that setuptools is always imported before distutils."
+ )
+
+
+def clear_distutils():
+ if 'distutils' not in sys.modules:
+ return
+ import warnings
+
+ warnings.warn("Setuptools is replacing distutils.")
+ mods = [
+ name
+ for name in sys.modules
+ if name == "distutils" or name.startswith("distutils.")
+ ]
+ for name in mods:
+ del sys.modules[name]
+
+
+def enabled():
+ """
+ Allow selection of distutils by environment variable.
+ """
+ which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
+ return which == 'local'
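+
+
+# For example, SETUPTOOLS_USE_DISTUTILS=stdlib opts back into the
+# standard-library distutils (enabled() returns False); the default,
+# 'local', keeps the setuptools-bundled copy.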
+
+
+def ensure_local_distutils():
+ import importlib
+
+ clear_distutils()
+
+ # With the DistutilsMetaFinder in place,
+ # perform an import to cause distutils to be
+ # loaded from setuptools._distutils. Ref #2906.
+ with shim():
+ importlib.import_module('distutils')
+
+ # check that submodules load as expected
+ core = importlib.import_module('distutils.core')
+ assert '_distutils' in core.__file__, core.__file__
+ assert 'setuptools._distutils.log' not in sys.modules
+
+
+def do_override():
+ """
+ Ensure that the local copy of distutils is preferred over stdlib.
+
+ See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
+ for more motivation.
+ """
+ if enabled():
+ warn_distutils_present()
+ ensure_local_distutils()
+
+
+class _TrivialRe:
+ def __init__(self, *patterns):
+ self._patterns = patterns
+
+ def match(self, string):
+ return all(pat in string for pat in self._patterns)
+
+
+class DistutilsMetaFinder:
+ def find_spec(self, fullname, path, target=None):
+ # optimization: only consider top level modules and those
+ # found in the CPython test suite.
+ if path is not None and not fullname.startswith('test.'):
+ return
+
+ method_name = 'spec_for_{fullname}'.format(**locals())
+ method = getattr(self, method_name, lambda: None)
+ return method()
+
+ def spec_for_distutils(self):
+ if self.is_cpython():
+ return
+
+ import importlib
+ import importlib.abc
+ import importlib.util
+
+ try:
+ mod = importlib.import_module('setuptools._distutils')
+ except Exception:
+ # There are a couple of cases where setuptools._distutils
+ # may not be present:
+ # - An older Setuptools without a local distutils is
+ # taking precedence. Ref #2957.
+ # - Path manipulation during sitecustomize removes
+ # setuptools from the path but only after the hook
+ # has been loaded. Ref #2980.
+ # In either case, fall back to stdlib behavior.
+ return
+
+ class DistutilsLoader(importlib.abc.Loader):
+ def create_module(self, spec):
+ mod.__name__ = 'distutils'
+ return mod
+
+ def exec_module(self, module):
+ pass
+
+ return importlib.util.spec_from_loader(
+ 'distutils', DistutilsLoader(), origin=mod.__file__
+ )
+
+ @staticmethod
+ def is_cpython():
+ """
+ Suppress supplying distutils for CPython (build and tests).
+ Ref #2965 and #3007.
+ """
+ return os.path.isfile('pybuilddir.txt')
+
+ def spec_for_pip(self):
+ """
+ Ensure stdlib distutils when running under pip.
+ See pypa/pip#8761 for rationale.
+ """
+ if sys.version_info >= (3, 12) or self.pip_imported_during_build():
+ return
+ clear_distutils()
+ self.spec_for_distutils = lambda: None
+
+ @classmethod
+ def pip_imported_during_build(cls):
+ """
+ Detect if pip is being imported in a build script. Ref #2355.
+ """
+ import traceback
+
+ return any(
+ cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
+ )
+
+ @staticmethod
+ def frame_file_is_setup(frame):
+ """
+ Return True if the indicated frame suggests a setup.py file.
+ """
+ # some frames may not have __file__ (#2940)
+ return frame.f_globals.get('__file__', '').endswith('setup.py')
+
+ def spec_for_sensitive_tests(self):
+ """
+ Ensure stdlib distutils when running select tests under CPython.
+
+ python/cpython#91169
+ """
+ clear_distutils()
+ self.spec_for_distutils = lambda: None
+
+ sensitive_tests = (
+ [
+ 'test.test_distutils',
+ 'test.test_peg_generator',
+ 'test.test_importlib',
+ ]
+ if sys.version_info < (3, 10)
+ else [
+ 'test.test_distutils',
+ ]
+ )
+
+
+for name in DistutilsMetaFinder.sensitive_tests:
+ setattr(
+ DistutilsMetaFinder,
+ f'spec_for_{name}',
+ DistutilsMetaFinder.spec_for_sensitive_tests,
+ )
+
+
+DISTUTILS_FINDER = DistutilsMetaFinder()
+
+
+def add_shim():
+ DISTUTILS_FINDER in sys.meta_path or insert_shim()
+
+
+class shim:
+ def __enter__(self):
+ insert_shim()
+
+ def __exit__(self, exc, value, tb):
+ _remove_shim()
+
+
+def insert_shim():
+ sys.meta_path.insert(0, DISTUTILS_FINDER)
+
+
+def _remove_shim():
+ try:
+ sys.meta_path.remove(DISTUTILS_FINDER)
+ except ValueError:
+ pass
+
+
+if sys.version_info < (3, 12):
+ # DistutilsMetaFinder can only be disabled in Python < 3.12 (PEP 632)
+ remove_shim = _remove_shim
diff --git a/Lib/site-packages/_distutils_hack/override.py b/Lib/site-packages/_distutils_hack/override.py
new file mode 100644
index 0000000..2cc433a
--- /dev/null
+++ b/Lib/site-packages/_distutils_hack/override.py
@@ -0,0 +1 @@
+__import__('_distutils_hack').do_override()
diff --git a/Lib/site-packages/apiclient/__init__.py b/Lib/site-packages/apiclient/__init__.py
new file mode 100644
index 0000000..e7f205a
--- /dev/null
+++ b/Lib/site-packages/apiclient/__init__.py
@@ -0,0 +1,27 @@
+"""Retain apiclient as an alias for googleapiclient."""
+
+from googleapiclient import channel, discovery, errors, http, mimeparse, model
+
+try:
+ from googleapiclient import sample_tools
+except ImportError:
+ # Silently ignore, because the vast majority of consumers won't use it and
+ # it has deep dependence on oauth2client, an optional dependency.
+ sample_tools = None
+from googleapiclient import schema
+
+_SUBMODULES = {
+ "channel": channel,
+ "discovery": discovery,
+ "errors": errors,
+ "http": http,
+ "mimeparse": mimeparse,
+ "model": model,
+ "sample_tools": sample_tools,
+ "schema": schema,
+}
+
+import sys
+
+for module_name, module in _SUBMODULES.items():
+ sys.modules["apiclient.%s" % module_name] = module
diff --git a/Lib/site-packages/blinker-1.7.0.dist-info/INSTALLER b/Lib/site-packages/blinker-1.7.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/blinker-1.7.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/blinker-1.7.0.dist-info/LICENSE.rst b/Lib/site-packages/blinker-1.7.0.dist-info/LICENSE.rst
new file mode 100644
index 0000000..79c9825
--- /dev/null
+++ b/Lib/site-packages/blinker-1.7.0.dist-info/LICENSE.rst
@@ -0,0 +1,20 @@
+Copyright 2010 Jason Kirtland
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/Lib/site-packages/blinker-1.7.0.dist-info/METADATA b/Lib/site-packages/blinker-1.7.0.dist-info/METADATA
new file mode 100644
index 0000000..f96613c
--- /dev/null
+++ b/Lib/site-packages/blinker-1.7.0.dist-info/METADATA
@@ -0,0 +1,62 @@
+Metadata-Version: 2.1
+Name: blinker
+Version: 1.7.0
+Summary: Fast, simple object-to-object and broadcast signaling
+Keywords: signal,emit,events,broadcast
+Author-email: Jason Kirtland
+Maintainer-email: Pallets Ecosystem
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development :: Libraries
+Project-URL: Chat, https://discord.gg/pallets
+Project-URL: Documentation, https://blinker.readthedocs.io
+Project-URL: Homepage, https://blinker.readthedocs.io
+Project-URL: Issue Tracker, https://github.com/pallets-eco/blinker/issues/
+Project-URL: Source Code, https://github.com/pallets-eco/blinker/
+
+Blinker
+=======
+
+Blinker provides a fast dispatching system that allows any number of
+interested parties to subscribe to events, or "signals".
+
+Signal receivers can subscribe to specific senders or receive signals
+sent by any sender.
+
+.. code-block:: pycon
+
+ >>> from blinker import signal
+ >>> started = signal('round-started')
+ >>> def each(round):
+ ... print(f"Round {round}")
+ ...
+ >>> started.connect(each)
+
+ >>> def round_two(round):
+ ... print("This is round two.")
+ ...
+ >>> started.connect(round_two, sender=2)
+
+ >>> for round in range(1, 4):
+ ... started.send(round)
+ ...
+ Round 1!
+ Round 2!
+ This is round two.
+ Round 3!
+
+
+Links
+-----
+
+- Documentation: https://blinker.readthedocs.io/
+- Changes: https://blinker.readthedocs.io/#changes
+- PyPI Releases: https://pypi.org/project/blinker/
+- Source Code: https://github.com/pallets-eco/blinker/
+- Issue Tracker: https://github.com/pallets-eco/blinker/issues/
+
diff --git a/Lib/site-packages/blinker-1.7.0.dist-info/RECORD b/Lib/site-packages/blinker-1.7.0.dist-info/RECORD
new file mode 100644
index 0000000..478bcbd
--- /dev/null
+++ b/Lib/site-packages/blinker-1.7.0.dist-info/RECORD
@@ -0,0 +1,14 @@
+blinker-1.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+blinker-1.7.0.dist-info/LICENSE.rst,sha256=nrc6HzhZekqhcCXSrhvjg5Ykx5XphdTw6Xac4p-spGc,1054
+blinker-1.7.0.dist-info/METADATA,sha256=kDgzPgrw4he78pEX88bSAqwYMVWrfUMk8QmNjekjg_U,1918
+blinker-1.7.0.dist-info/RECORD,,
+blinker-1.7.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+blinker/__init__.py,sha256=s75XaRDHwSDzZ21BZUOEkQDQIcQEyT8hT7vk3EhYFQU,408
+blinker/__pycache__/__init__.cpython-312.pyc,,
+blinker/__pycache__/_saferef.cpython-312.pyc,,
+blinker/__pycache__/_utilities.cpython-312.pyc,,
+blinker/__pycache__/base.cpython-312.pyc,,
+blinker/_saferef.py,sha256=kWOTIWnCY3kOb8lZP74Rbx7bR_BLVg4TjwzNCRLhKHs,9096
+blinker/_utilities.py,sha256=S2njKDmlBpK_yCK4RT8hq98hEj30I0TQCC5mNhtY22I,2856
+blinker/base.py,sha256=FqZmAI5YzuRrvRmye1Jb-utyVOjXtF5vUVP3-1u-HtU,20544
+blinker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/Lib/site-packages/blinker-1.7.0.dist-info/WHEEL b/Lib/site-packages/blinker-1.7.0.dist-info/WHEEL
new file mode 100644
index 0000000..3b5e64b
--- /dev/null
+++ b/Lib/site-packages/blinker-1.7.0.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.9.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/Lib/site-packages/blinker/__init__.py b/Lib/site-packages/blinker/__init__.py
new file mode 100644
index 0000000..d014caa
--- /dev/null
+++ b/Lib/site-packages/blinker/__init__.py
@@ -0,0 +1,19 @@
+from blinker.base import ANY
+from blinker.base import NamedSignal
+from blinker.base import Namespace
+from blinker.base import receiver_connected
+from blinker.base import Signal
+from blinker.base import signal
+from blinker.base import WeakNamespace
+
+__all__ = [
+ "ANY",
+ "NamedSignal",
+ "Namespace",
+ "Signal",
+ "WeakNamespace",
+ "receiver_connected",
+ "signal",
+]
+
+__version__ = "1.7.0"
diff --git a/Lib/site-packages/blinker/_saferef.py b/Lib/site-packages/blinker/_saferef.py
new file mode 100644
index 0000000..dcb70c1
--- /dev/null
+++ b/Lib/site-packages/blinker/_saferef.py
@@ -0,0 +1,230 @@
+# extracted from Louie, http://pylouie.org/
+# updated for Python 3
+#
+# Copyright (c) 2006 Patrick K. O'Brien, Mike C. Fletcher,
+# Matthew R. Scott
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+#
+# * Neither the name of the <ORGANIZATION> nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+"""Refactored 'safe reference from dispatcher.py"""
+import operator
+import sys
+import traceback
+import weakref
+
+
+get_self = operator.attrgetter("__self__")
+get_func = operator.attrgetter("__func__")
+
+
+def safe_ref(target, on_delete=None):
+ """Return a *safe* weak reference to a callable target.
+
+ - ``target``: The object to be weakly referenced, if it's a bound
+ method reference, will create a BoundMethodWeakref, otherwise
+ creates a simple weakref.
+
+ - ``on_delete``: If provided, will have a hard reference stored to
+ the callable to be called after the safe reference goes out of
+ scope with the reference object, (either a weakref or a
+ BoundMethodWeakref) as argument.
+ """
+ try:
+ im_self = get_self(target)
+ except AttributeError:
+ if callable(on_delete):
+ return weakref.ref(target, on_delete)
+ else:
+ return weakref.ref(target)
+ else:
+ if im_self is not None:
+ # Turn a bound method into a BoundMethodWeakref instance.
+ # Keep track of these instances for lookup by disconnect().
+ assert hasattr(target, "im_func") or hasattr(target, "__func__"), (
+ f"safe_ref target {target!r} has im_self, but no im_func, "
+ "don't know how to create reference"
+ )
+ reference = BoundMethodWeakref(target=target, on_delete=on_delete)
+ return reference
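+
+
+# Usage sketch (hypothetical receiver class):
+#
+#   class Receiver:
+#       def on_signal(self, sender):
+#           ...
+#
+#   r = Receiver()
+#   ref = safe_ref(r.on_signal)           # bound method -> BoundMethodWeakref
+#   ref2 = safe_ref(lambda sender: None)  # plain callable -> weakref.ref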
+
+
+class BoundMethodWeakref:
+ """'Safe' and reusable weak references to instance methods.
+
+ BoundMethodWeakref objects provide a mechanism for referencing a
+ bound method without requiring that the method object itself
+ (which is normally a transient object) is kept alive. Instead,
+ the BoundMethodWeakref object keeps weak references to both the
+ object and the function which together define the instance method.
+
+ Attributes:
+
+ - ``key``: The identity key for the reference, calculated by the
+ class's calculate_key method applied to the target instance method.
+
+    - ``deletion_methods``: Sequence of callables, each taking a single
+      argument (a reference to this object), called when *either* the
+      target object or the target function is garbage collected (i.e.
+      when this object becomes invalid). These are specified as the
+      on_delete parameters of safe_ref calls.
+
+ - ``weak_self``: Weak reference to the target object.
+
+ - ``weak_func``: Weak reference to the target function.
+
+ Class Attributes:
+
+ - ``_all_instances``: Class attribute pointing to all live
+ BoundMethodWeakref objects indexed by the class's
+ calculate_key(target) method applied to the target objects.
+ This weak value dictionary is used to short-circuit creation so
+ that multiple references to the same (object, function) pair
+ produce the same BoundMethodWeakref instance.
+ """
+
+ _all_instances = weakref.WeakValueDictionary() # type: ignore[var-annotated]
+
+ def __new__(cls, target, on_delete=None, *arguments, **named):
+ """Create new instance or return current instance.
+
+ Basically this method of construction allows us to
+ short-circuit creation of references to already-referenced
+ instance methods. The key corresponding to the target is
+ calculated, and if there is already an existing reference,
+ that is returned, with its deletion_methods attribute updated.
+ Otherwise the new instance is created and registered in the
+ table of already-referenced methods.
+ """
+ key = cls.calculate_key(target)
+ current = cls._all_instances.get(key)
+ if current is not None:
+ current.deletion_methods.append(on_delete)
+ return current
+ else:
+ base = super().__new__(cls)
+ cls._all_instances[key] = base
+ base.__init__(target, on_delete, *arguments, **named)
+ return base
+
+ def __init__(self, target, on_delete=None):
+ """Return a weak-reference-like instance for a bound method.
+
+ - ``target``: The instance-method target for the weak reference,
+ must have im_self and im_func attributes and be
+ reconstructable via the following, which is true of built-in
+ instance methods::
+
+            target.im_func.__get__(target.im_self)
+
+    - ``on_delete``: Optional callback, called when this weak reference
+      ceases to be valid (i.e. either the object or the function is
+      garbage collected). It should take a single argument: a
+      reference to this object.
+ """
+
+ def remove(weak, self=self):
+ """Set self.isDead to True when method or instance is destroyed."""
+ methods = self.deletion_methods[:]
+ del self.deletion_methods[:]
+ try:
+ del self.__class__._all_instances[self.key]
+ except KeyError:
+ pass
+ for function in methods:
+ try:
+ if callable(function):
+ function(self)
+ except Exception:
+ try:
+ traceback.print_exc()
+ except AttributeError:
+ e = sys.exc_info()[1]
+ print(
+ f"Exception during saferef {self} "
+ f"cleanup function {function}: {e}"
+ )
+
+ self.deletion_methods = [on_delete]
+ self.key = self.calculate_key(target)
+ im_self = get_self(target)
+ im_func = get_func(target)
+ self.weak_self = weakref.ref(im_self, remove)
+ self.weak_func = weakref.ref(im_func, remove)
+ self.self_name = str(im_self)
+ self.func_name = str(im_func.__name__)
+
+ @classmethod
+ def calculate_key(cls, target):
+ """Calculate the reference key for this reference.
+
+        Currently this is a 2-tuple of the ``id()`` values of the
+        target object and the target function, respectively.
+ """
+ return (id(get_self(target)), id(get_func(target)))
+
+ def __str__(self):
+ """Give a friendly representation of the object."""
+ return "{}({}.{})".format(
+ self.__class__.__name__,
+ self.self_name,
+ self.func_name,
+ )
+
+ __repr__ = __str__
+
+ def __hash__(self):
+ return hash((self.self_name, self.key))
+
+    def __bool__(self):
+ """Whether we are still a valid reference."""
+ return self() is not None
+
+ def __eq__(self, other):
+ """Compare with another reference."""
+ if not isinstance(other, self.__class__):
+ return operator.eq(self.__class__, type(other))
+ return operator.eq(self.key, other.key)
+
+ def __call__(self):
+ """Return a strong reference to the bound method.
+
+ If the target cannot be retrieved, then will return None,
+ otherwise returns a bound instance method for our object and
+ function.
+
+ Note: You may call this method any number of times, as it does
+ not invalidate the reference.
+ """
+ target = self.weak_self()
+ if target is not None:
+ function = self.weak_func()
+ if function is not None:
+ return function.__get__(target)
+ return None
diff --git a/Lib/site-packages/blinker/_utilities.py b/Lib/site-packages/blinker/_utilities.py
new file mode 100644
index 0000000..4b711c6
--- /dev/null
+++ b/Lib/site-packages/blinker/_utilities.py
@@ -0,0 +1,105 @@
+from __future__ import annotations
+
+import typing as t
+from weakref import ref
+
+from blinker._saferef import BoundMethodWeakref
+
+IdentityType = t.Union[t.Tuple[int, int], str, int]
+
+
+class _symbol:
+ def __init__(self, name):
+ """Construct a new named symbol."""
+ self.__name__ = self.name = name
+
+ def __reduce__(self):
+ return symbol, (self.name,)
+
+ def __repr__(self):
+ return self.name
+
+
+_symbol.__name__ = "symbol"
+
+
+class symbol:
+ """A constant symbol.
+
+ >>> symbol('foo') is symbol('foo')
+ True
+ >>> symbol('foo')
+ foo
+
+ A slight refinement of the MAGICCOOKIE=object() pattern. The primary
+ advantage of symbol() is its repr(). They are also singletons.
+
+ Repeated calls of symbol('name') will all return the same instance.
+
+ """
+
+ symbols = {} # type: ignore[var-annotated]
+
+ def __new__(cls, name):
+ try:
+ return cls.symbols[name]
+ except KeyError:
+ return cls.symbols.setdefault(name, _symbol(name))
+
+
+def hashable_identity(obj: object) -> IdentityType:
+ if hasattr(obj, "__func__"):
+ return (id(obj.__func__), id(obj.__self__)) # type: ignore[attr-defined]
+ elif hasattr(obj, "im_func"):
+ return (id(obj.im_func), id(obj.im_self)) # type: ignore[attr-defined]
+ elif isinstance(obj, (int, str)):
+ return obj
+ else:
+ return id(obj)
+
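+# Illustrative only (hypothetical names): the identity keys produced for a
+# few kinds of objects.
+#
+#     >>> hashable_identity("ready")  # ints and strings pass through
+#     'ready'
+#     >>> class C:
+#     ...     def m(self):
+#     ...         pass
+#     >>> c = C()
+#     >>> hashable_identity(c.m) == (id(c.m.__func__), id(c.m.__self__))
+#     True
+#     >>> hashable_identity(c) == id(c)  # other objects fall back to id()
+#     True
+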
+
+WeakTypes = (ref, BoundMethodWeakref)
+
+
+class annotatable_weakref(ref):
+ """A weakref.ref that supports custom instance attributes."""
+
+ receiver_id: t.Optional[IdentityType]
+ sender_id: t.Optional[IdentityType]
+
+
+def reference( # type: ignore[no-untyped-def]
+ object, callback=None, **annotations
+) -> annotatable_weakref:
+ """Return an annotated weak ref."""
+ if callable(object):
+ weak = callable_reference(object, callback)
+ else:
+ weak = annotatable_weakref(object, callback)
+ for key, value in annotations.items():
+ setattr(weak, key, value)
+ return weak # type: ignore[no-any-return]
+
+
+def callable_reference(object, callback=None):
+ """Return an annotated weak ref, supporting bound instance methods."""
+ if hasattr(object, "im_self") and object.im_self is not None:
+ return BoundMethodWeakref(target=object, on_delete=callback)
+ elif hasattr(object, "__self__") and object.__self__ is not None:
+ return BoundMethodWeakref(target=object, on_delete=callback)
+ return annotatable_weakref(object, callback)
+
+
+class lazy_property:
+ """A @property that is only evaluated once."""
+
+ def __init__(self, deferred):
+ self._deferred = deferred
+ self.__doc__ = deferred.__doc__
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self
+ value = self._deferred(obj)
+ setattr(obj, self._deferred.__name__, value)
+ return value
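+
+
+# Minimal demo, illustrative only (``Config`` is a hypothetical name):
+# lazy_property computes once, then caches the result as a plain instance
+# attribute that shadows the descriptor on later lookups.
+if __name__ == "__main__":
+
+    class Config:
+        calls = 0
+
+        @lazy_property
+        def expensive(self):
+            Config.calls += 1
+            return 42
+
+    cfg = Config()
+    assert cfg.expensive == 42
+    assert cfg.expensive == 42  # second access hits the cached attribute
+    assert Config.calls == 1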
diff --git a/Lib/site-packages/blinker/base.py b/Lib/site-packages/blinker/base.py
new file mode 100644
index 0000000..b9d7035
--- /dev/null
+++ b/Lib/site-packages/blinker/base.py
@@ -0,0 +1,558 @@
+"""Signals and events.
+
+A small implementation of signals, inspired by a snippet of Django signal
+API client code seen in a blog post. Signals are first-class objects and
+each manages its own receivers and message emission.
+
+The :func:`signal` function provides singleton behavior for named signals.
+
+"""
+from __future__ import annotations
+
+import typing as t
+from collections import defaultdict
+from contextlib import contextmanager
+from inspect import iscoroutinefunction
+from warnings import warn
+from weakref import WeakValueDictionary
+
+from blinker._utilities import annotatable_weakref
+from blinker._utilities import hashable_identity
+from blinker._utilities import IdentityType
+from blinker._utilities import lazy_property
+from blinker._utilities import reference
+from blinker._utilities import symbol
+from blinker._utilities import WeakTypes
+
+if t.TYPE_CHECKING:
+ import typing_extensions as te
+
+ T_callable = t.TypeVar("T_callable", bound=t.Callable[..., t.Any])
+
+ T = t.TypeVar("T")
+ P = te.ParamSpec("P")
+
+ AsyncWrapperType = t.Callable[[t.Callable[P, t.Awaitable[T]]], t.Callable[P, T]]
+ SyncWrapperType = t.Callable[[t.Callable[P, T]], t.Callable[P, t.Awaitable[T]]]
+
+ANY = symbol("ANY")
+ANY.__doc__ = 'Token for "any sender".'
+ANY_ID = 0
+
+# NOTE: We need a reference to cast for use in weakref callbacks; otherwise
+# t.cast may have already been set to None during finalization.
+cast = t.cast
+
+
+class Signal:
+ """A notification emitter."""
+
+ #: An :obj:`ANY` convenience synonym, allows ``Signal.ANY``
+ #: without an additional import.
+ ANY = ANY
+
+ set_class: type[set] = set
+
+ @lazy_property
+ def receiver_connected(self) -> Signal:
+ """Emitted after each :meth:`connect`.
+
+ The signal sender is the signal instance, and the :meth:`connect`
+ arguments are passed through: *receiver*, *sender*, and *weak*.
+
+ .. versionadded:: 1.2
+
+ """
+ return Signal(doc="Emitted after a receiver connects.")
+
+ @lazy_property
+ def receiver_disconnected(self) -> Signal:
+ """Emitted after :meth:`disconnect`.
+
+ The sender is the signal instance, and the :meth:`disconnect` arguments
+ are passed through: *receiver* and *sender*.
+
+        Note that this signal is emitted **only** when :meth:`disconnect` is
+        called explicitly.
+
+        The disconnect signal cannot be emitted by an automatic disconnect
+        (due to a weakly referenced receiver or sender going out of scope),
+        as the receiver and/or sender instances are no longer available for
+        use at the time this signal would be emitted.
+
+ An alternative approach is available by subscribing to
+ :attr:`receiver_connected` and setting up a custom weakref cleanup
+ callback on weak receivers and senders.
+
+ .. versionadded:: 1.2
+
+ """
+ return Signal(doc="Emitted after a receiver disconnects.")
+
+ def __init__(self, doc: str | None = None) -> None:
+ """
+ :param doc: optional. If provided, will be assigned to the signal's
+ __doc__ attribute.
+
+ """
+ if doc:
+ self.__doc__ = doc
+ #: A mapping of connected receivers.
+ #:
+ #: The values of this mapping are not meaningful outside of the
+ #: internal :class:`Signal` implementation, however the boolean value
+ #: of the mapping is useful as an extremely efficient check to see if
+ #: any receivers are connected to the signal.
+ self.receivers: dict[IdentityType, t.Callable | annotatable_weakref] = {}
+ self.is_muted = False
+ self._by_receiver: dict[IdentityType, set[IdentityType]] = defaultdict(
+ self.set_class
+ )
+ self._by_sender: dict[IdentityType, set[IdentityType]] = defaultdict(
+ self.set_class
+ )
+ self._weak_senders: dict[IdentityType, annotatable_weakref] = {}
+
+ def connect(
+ self, receiver: T_callable, sender: t.Any = ANY, weak: bool = True
+ ) -> T_callable:
+ """Connect *receiver* to signal events sent by *sender*.
+
+ :param receiver: A callable. Will be invoked by :meth:`send` with
+ `sender=` as a single positional argument and any ``kwargs`` that
+ were provided to a call to :meth:`send`.
+
+ :param sender: Any object or :obj:`ANY`, defaults to ``ANY``.
+ Restricts notifications delivered to *receiver* to only those
+ :meth:`send` emissions sent by *sender*. If ``ANY``, the receiver
+ will always be notified. A *receiver* may be connected to
+ multiple *sender* values on the same Signal through multiple calls
+ to :meth:`connect`.
+
+ :param weak: If true, the Signal will hold a weakref to *receiver*
+ and automatically disconnect when *receiver* goes out of scope or
+ is garbage collected. Defaults to True.
+
+ """
+ receiver_id = hashable_identity(receiver)
+ receiver_ref: T_callable | annotatable_weakref
+
+ if weak:
+ receiver_ref = reference(receiver, self._cleanup_receiver)
+ receiver_ref.receiver_id = receiver_id
+ else:
+ receiver_ref = receiver
+ sender_id: IdentityType
+ if sender is ANY:
+ sender_id = ANY_ID
+ else:
+ sender_id = hashable_identity(sender)
+
+ self.receivers.setdefault(receiver_id, receiver_ref)
+ self._by_sender[sender_id].add(receiver_id)
+ self._by_receiver[receiver_id].add(sender_id)
+ del receiver_ref
+
+ if sender is not ANY and sender_id not in self._weak_senders:
+ # wire together a cleanup for weakref-able senders
+ try:
+ sender_ref = reference(sender, self._cleanup_sender)
+ sender_ref.sender_id = sender_id
+ except TypeError:
+ pass
+ else:
+ self._weak_senders.setdefault(sender_id, sender_ref)
+ del sender_ref
+
+ # broadcast this connection. if receivers raise, disconnect.
+ if "receiver_connected" in self.__dict__ and self.receiver_connected.receivers:
+ try:
+ self.receiver_connected.send(
+ self, receiver=receiver, sender=sender, weak=weak
+ )
+ except TypeError as e:
+ self.disconnect(receiver, sender)
+ raise e
+ if receiver_connected.receivers and self is not receiver_connected:
+ try:
+ receiver_connected.send(
+ self, receiver_arg=receiver, sender_arg=sender, weak_arg=weak
+ )
+ except TypeError as e:
+ self.disconnect(receiver, sender)
+ raise e
+ return receiver
+
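+    # Illustrative only (hypothetical names): restricting a receiver to a
+    # single sender.
+    #
+    #     >>> sig = Signal()
+    #     >>> alice, bob = object(), object()
+    #     >>> def on_alice(sender):
+    #     ...     return "hi alice"
+    #     >>> _ = sig.connect(on_alice, sender=alice)
+    #     >>> [result for _, result in sig.send(alice)]
+    #     ['hi alice']
+    #     >>> sig.send(bob)  # not connected for this sender
+    #     []
+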
+ def connect_via(
+ self, sender: t.Any, weak: bool = False
+ ) -> t.Callable[[T_callable], T_callable]:
+ """Connect the decorated function as a receiver for *sender*.
+
+ :param sender: Any object or :obj:`ANY`. The decorated function
+ will only receive :meth:`send` emissions sent by *sender*. If
+ ``ANY``, the receiver will always be notified. A function may be
+ decorated multiple times with differing *sender* values.
+
+ :param weak: If true, the Signal will hold a weakref to the
+ decorated function and automatically disconnect when *receiver*
+ goes out of scope or is garbage collected. Unlike
+ :meth:`connect`, this defaults to False.
+
+ The decorated function will be invoked by :meth:`send` with
+ `sender=` as a single positional argument and any ``kwargs`` that
+ were provided to the call to :meth:`send`.
+
+
+ .. versionadded:: 1.1
+
+ """
+
+ def decorator(fn: T_callable) -> T_callable:
+ self.connect(fn, sender, weak)
+ return fn
+
+ return decorator
+
+ @contextmanager
+ def connected_to(
+ self, receiver: t.Callable, sender: t.Any = ANY
+ ) -> t.Generator[None, None, None]:
+ """Execute a block with the signal temporarily connected to *receiver*.
+
+ :param receiver: a receiver callable
+ :param sender: optional, a sender to filter on
+
+ This is a context manager for use in the ``with`` statement. It can
+ be useful in unit tests. *receiver* is connected to the signal for
+ the duration of the ``with`` block, and will be disconnected
+ automatically when exiting the block:
+
+ .. code-block:: python
+
+ with on_ready.connected_to(receiver):
+ # do stuff
+ on_ready.send(123)
+
+ .. versionadded:: 1.1
+
+ """
+ self.connect(receiver, sender=sender, weak=False)
+ try:
+ yield None
+ finally:
+ self.disconnect(receiver)
+
+ @contextmanager
+ def muted(self) -> t.Generator[None, None, None]:
+ """Context manager for temporarily disabling signal.
+ Useful for test purposes.
+ """
+ self.is_muted = True
+        try:
+            yield None
+        finally:
+            self.is_muted = False
+
+ def temporarily_connected_to(
+ self, receiver: t.Callable, sender: t.Any = ANY
+ ) -> t.ContextManager[None]:
+ """An alias for :meth:`connected_to`.
+
+ :param receiver: a receiver callable
+ :param sender: optional, a sender to filter on
+
+ .. versionadded:: 0.9
+
+ .. versionchanged:: 1.1
+ Renamed to :meth:`connected_to`. ``temporarily_connected_to`` was
+ deprecated in 1.2 and will be removed in a subsequent version.
+
+ """
+ warn(
+ "temporarily_connected_to is deprecated; use connected_to instead.",
+ DeprecationWarning,
+ )
+ return self.connected_to(receiver, sender)
+
+ def send(
+ self,
+ *sender: t.Any,
+ _async_wrapper: AsyncWrapperType | None = None,
+ **kwargs: t.Any,
+ ) -> list[tuple[t.Callable, t.Any]]:
+ """Emit this signal on behalf of *sender*, passing on ``kwargs``.
+
+ Returns a list of 2-tuples, pairing receivers with their return
+ value. The ordering of receiver notification is undefined.
+
+ :param sender: Any object or ``None``. If omitted, synonymous
+ with ``None``. Only accepts one positional argument.
+ :param _async_wrapper: A callable that should wrap a coroutine
+ receiver and run it when called synchronously.
+
+ :param kwargs: Data to be sent to receivers.
+ """
+ if self.is_muted:
+ return []
+
+ sender = self._extract_sender(sender)
+ results = []
+ for receiver in self.receivers_for(sender):
+ if iscoroutinefunction(receiver):
+ if _async_wrapper is None:
+ raise RuntimeError("Cannot send to a coroutine function")
+ receiver = _async_wrapper(receiver)
+ result = receiver(sender, **kwargs)
+ results.append((receiver, result))
+ return results
+
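+    # Illustrative only (hypothetical names): kwargs are forwarded and each
+    # receiver is paired with its return value.
+    #
+    #     >>> totals = Signal()
+    #     >>> def add(sender, amount=0):
+    #     ...     return sender + amount
+    #     >>> _ = totals.connect(add)
+    #     >>> totals.send(10, amount=5)  # doctest: +ELLIPSIS
+    #     [(<function add at ...>, 15)]
+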
+ async def send_async(
+ self,
+ *sender: t.Any,
+ _sync_wrapper: SyncWrapperType | None = None,
+ **kwargs: t.Any,
+ ) -> list[tuple[t.Callable, t.Any]]:
+ """Emit this signal on behalf of *sender*, passing on ``kwargs``.
+
+ Returns a list of 2-tuples, pairing receivers with their return
+ value. The ordering of receiver notification is undefined.
+
+ :param sender: Any object or ``None``. If omitted, synonymous
+ with ``None``. Only accepts one positional argument.
+ :param _sync_wrapper: A callable that should wrap a synchronous
+ receiver and run it when awaited.
+
+ :param kwargs: Data to be sent to receivers.
+ """
+ if self.is_muted:
+ return []
+
+ sender = self._extract_sender(sender)
+ results = []
+ for receiver in self.receivers_for(sender):
+ if not iscoroutinefunction(receiver):
+ if _sync_wrapper is None:
+ raise RuntimeError("Cannot send to a non-coroutine function")
+ receiver = _sync_wrapper(receiver)
+ result = await receiver(sender, **kwargs)
+ results.append((receiver, result))
+ return results
+
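+    # Illustrative only (hypothetical names): coroutine receivers are
+    # awaited directly by send_async.
+    #
+    #     >>> import asyncio
+    #     >>> pinged = Signal()
+    #     >>> async def on_ping(sender):
+    #     ...     return "pong"
+    #     >>> _ = pinged.connect(on_ping)
+    #     >>> asyncio.run(pinged.send_async("ping"))  # doctest: +ELLIPSIS
+    #     [(<function on_ping at ...>, 'pong')]
+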
+ def _extract_sender(self, sender: t.Any) -> t.Any:
+ if not self.receivers:
+ # Ensure correct signature even on no-op sends, disable with -O
+ # for lowest possible cost.
+ if __debug__ and sender and len(sender) > 1:
+ raise TypeError(
+ f"send() accepts only one positional argument, {len(sender)} given"
+ )
+ return []
+
+ # Using '*sender' rather than 'sender=None' allows 'sender' to be
+ # used as a keyword argument- i.e. it's an invisible name in the
+ # function signature.
+ if len(sender) == 0:
+ sender = None
+ elif len(sender) > 1:
+ raise TypeError(
+ f"send() accepts only one positional argument, {len(sender)} given"
+ )
+ else:
+ sender = sender[0]
+ return sender
+
+ def has_receivers_for(self, sender: t.Any) -> bool:
+ """True if there is probably a receiver for *sender*.
+
+ Performs an optimistic check only. Does not guarantee that all
+ weakly referenced receivers are still alive. See
+ :meth:`receivers_for` for a stronger search.
+
+ """
+ if not self.receivers:
+ return False
+ if self._by_sender[ANY_ID]:
+ return True
+ if sender is ANY:
+ return False
+ return hashable_identity(sender) in self._by_sender
+
+ def receivers_for(
+ self, sender: t.Any
+ ) -> t.Generator[t.Callable[[t.Any], t.Any], None, None]:
+ """Iterate all live receivers listening for *sender*."""
+ # TODO: test receivers_for(ANY)
+ if self.receivers:
+ sender_id = hashable_identity(sender)
+ if sender_id in self._by_sender:
+ ids = self._by_sender[ANY_ID] | self._by_sender[sender_id]
+ else:
+ ids = self._by_sender[ANY_ID].copy()
+ for receiver_id in ids:
+ receiver = self.receivers.get(receiver_id)
+ if receiver is None:
+ continue
+ if isinstance(receiver, WeakTypes):
+ strong = receiver()
+ if strong is None:
+ self._disconnect(receiver_id, ANY_ID)
+ continue
+ receiver = strong
+ yield receiver # type: ignore[misc]
+
+ def disconnect(self, receiver: t.Callable, sender: t.Any = ANY) -> None:
+ """Disconnect *receiver* from this signal's events.
+
+        :param receiver: a callable previously connected via :meth:`connect`
+
+ :param sender: a specific sender to disconnect from, or :obj:`ANY`
+ to disconnect from all senders. Defaults to ``ANY``.
+
+ """
+ sender_id: IdentityType
+ if sender is ANY:
+ sender_id = ANY_ID
+ else:
+ sender_id = hashable_identity(sender)
+ receiver_id = hashable_identity(receiver)
+ self._disconnect(receiver_id, sender_id)
+
+ if (
+ "receiver_disconnected" in self.__dict__
+ and self.receiver_disconnected.receivers
+ ):
+ self.receiver_disconnected.send(self, receiver=receiver, sender=sender)
+
+ def _disconnect(self, receiver_id: IdentityType, sender_id: IdentityType) -> None:
+ if sender_id == ANY_ID:
+ if self._by_receiver.pop(receiver_id, False):
+ for bucket in self._by_sender.values():
+ bucket.discard(receiver_id)
+ self.receivers.pop(receiver_id, None)
+ else:
+ self._by_sender[sender_id].discard(receiver_id)
+ self._by_receiver[receiver_id].discard(sender_id)
+
+ def _cleanup_receiver(self, receiver_ref: annotatable_weakref) -> None:
+ """Disconnect a receiver from all senders."""
+ self._disconnect(cast(IdentityType, receiver_ref.receiver_id), ANY_ID)
+
+ def _cleanup_sender(self, sender_ref: annotatable_weakref) -> None:
+ """Disconnect all receivers from a sender."""
+ sender_id = cast(IdentityType, sender_ref.sender_id)
+ assert sender_id != ANY_ID
+ self._weak_senders.pop(sender_id, None)
+ for receiver_id in self._by_sender.pop(sender_id, ()):
+ self._by_receiver[receiver_id].discard(sender_id)
+
+ def _cleanup_bookkeeping(self) -> None:
+ """Prune unused sender/receiver bookkeeping. Not threadsafe.
+
+ Connecting & disconnecting leave behind a small amount of bookkeeping
+ for the receiver and sender values. Typical workloads using Blinker,
+ for example in most web apps, Flask, CLI scripts, etc., are not
+ adversely affected by this bookkeeping.
+
+        In a long-running Python process that performs high-volume, dynamic
+        signal routing (e.g. connecting to function closures, where "senders"
+        are all unique object instances, over and over), memory usage may
+        grow due to this extraneous bookkeeping: an empty set() for each
+        stale sender/receiver pair.
+
+        This method prunes that bookkeeping away, with the caveat that such
+        pruning is not threadsafe. The risk is that cleanup of a fully
+        disconnected receiver/sender pair occurs while another thread is
+        connecting that same pair. If you are in the highly dynamic, unique
+        receiver/sender situation that has led you to this method, that
+        failure mode is perhaps not a big deal for you.
+ """
+ for mapping in (self._by_sender, self._by_receiver):
+ for _id, bucket in list(mapping.items()):
+ if not bucket:
+ mapping.pop(_id, None)
+
+ def _clear_state(self) -> None:
+ """Throw away all signal state. Useful for unit tests."""
+ self._weak_senders.clear()
+ self.receivers.clear()
+ self._by_sender.clear()
+ self._by_receiver.clear()
+
+
+receiver_connected = Signal(
+ """\
+Sent by a :class:`Signal` after a receiver connects.
+
+:argument: the Signal that was connected to
+:keyword receiver_arg: the connected receiver
+:keyword sender_arg: the sender to connect to
+:keyword weak_arg: true if the connection to receiver_arg is a weak reference
+
+.. deprecated:: 1.2
+
+As of 1.2, individual signals have their own private
+:attr:`~Signal.receiver_connected` and
+:attr:`~Signal.receiver_disconnected` signals with a slightly simplified
+call signature. This global signal is planned to be removed in 1.6.
+
+"""
+)
+
+
+class NamedSignal(Signal):
+ """A named generic notification emitter."""
+
+ def __init__(self, name: str, doc: str | None = None) -> None:
+ Signal.__init__(self, doc)
+
+ #: The name of this signal.
+ self.name = name
+
+ def __repr__(self) -> str:
+ base = Signal.__repr__(self)
+ return f"{base[:-1]}; {self.name!r}>" # noqa: E702
+
+
+class Namespace(dict):
+ """A mapping of signal names to signals."""
+
+ def signal(self, name: str, doc: str | None = None) -> NamedSignal:
+ """Return the :class:`NamedSignal` *name*, creating it if required.
+
+ Repeated calls to this function will return the same signal object.
+
+ """
+ try:
+ return self[name] # type: ignore[no-any-return]
+ except KeyError:
+ result = self.setdefault(name, NamedSignal(name, doc))
+ return result # type: ignore[no-any-return]
+
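+# Illustrative only (hypothetical names): named signals are singletons per
+# namespace, not globally.
+#
+#     >>> ns = Namespace()
+#     >>> ns.signal("ready") is ns.signal("ready")
+#     True
+#     >>> ns.signal("ready") is Namespace().signal("ready")
+#     False
+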
+
+class WeakNamespace(WeakValueDictionary):
+ """A weak mapping of signal names to signals.
+
+ Automatically cleans up unused Signals when the last reference goes out
+ of scope. This namespace implementation exists for a measure of legacy
+ compatibility with Blinker <= 1.2, and may be dropped in the future.
+
+ .. versionadded:: 1.3
+
+ """
+
+ def signal(self, name: str, doc: str | None = None) -> NamedSignal:
+ """Return the :class:`NamedSignal` *name*, creating it if required.
+
+ Repeated calls to this function will return the same signal object.
+
+ """
+ try:
+ return self[name] # type: ignore[no-any-return]
+ except KeyError:
+ result = self.setdefault(name, NamedSignal(name, doc))
+ return result # type: ignore[no-any-return]
+
+
+signal = Namespace().signal
diff --git a/Lib/site-packages/blinker/py.typed b/Lib/site-packages/blinker/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/cachetools-5.3.2.dist-info/INSTALLER b/Lib/site-packages/cachetools-5.3.2.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/cachetools-5.3.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/cachetools-5.3.2.dist-info/LICENSE b/Lib/site-packages/cachetools-5.3.2.dist-info/LICENSE
new file mode 100644
index 0000000..bd185ce
--- /dev/null
+++ b/Lib/site-packages/cachetools-5.3.2.dist-info/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2014-2022 Thomas Kemmer
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/Lib/site-packages/cachetools-5.3.2.dist-info/METADATA b/Lib/site-packages/cachetools-5.3.2.dist-info/METADATA
new file mode 100644
index 0000000..9504a94
--- /dev/null
+++ b/Lib/site-packages/cachetools-5.3.2.dist-info/METADATA
@@ -0,0 +1,148 @@
+Metadata-Version: 2.1
+Name: cachetools
+Version: 5.3.2
+Summary: Extensible memoizing collections and decorators
+Home-page: https://github.com/tkem/cachetools/
+Author: Thomas Kemmer
+Author-email: tkemmer@computer.org
+License: MIT
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Other Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.7
+License-File: LICENSE
+
+cachetools
+========================================================================
+
+.. image:: https://img.shields.io/pypi/v/cachetools
+ :target: https://pypi.org/project/cachetools/
+ :alt: Latest PyPI version
+
+.. image:: https://img.shields.io/github/actions/workflow/status/tkem/cachetools/ci.yml
+ :target: https://github.com/tkem/cachetools/actions/workflows/ci.yml
+ :alt: CI build status
+
+.. image:: https://img.shields.io/readthedocs/cachetools
+ :target: https://cachetools.readthedocs.io/
+ :alt: Documentation build status
+
+.. image:: https://img.shields.io/codecov/c/github/tkem/cachetools/master.svg
+ :target: https://codecov.io/gh/tkem/cachetools
+ :alt: Test coverage
+
+.. image:: https://img.shields.io/librariesio/sourcerank/pypi/cachetools
+ :target: https://libraries.io/pypi/cachetools
+ :alt: Libraries.io SourceRank
+
+.. image:: https://img.shields.io/github/license/tkem/cachetools
+ :target: https://raw.github.com/tkem/cachetools/master/LICENSE
+ :alt: License
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: black
+
+
+This module provides various memoizing collections and decorators,
+including variants of the Python Standard Library's `@lru_cache`_
+function decorator.
+
+.. code-block:: python
+
+   import urllib.request
+
+   from cachetools import cached, LRUCache, TTLCache
+
+ # speed up calculating Fibonacci numbers with dynamic programming
+ @cached(cache={})
+ def fib(n):
+ return n if n < 2 else fib(n - 1) + fib(n - 2)
+
+ # cache least recently used Python Enhancement Proposals
+ @cached(cache=LRUCache(maxsize=32))
+ def get_pep(num):
+ url = 'http://www.python.org/dev/peps/pep-%04d/' % num
+ with urllib.request.urlopen(url) as s:
+ return s.read()
+
+   # cache weather data for no longer than ten minutes
+   # (owm: an external weather-API client object, e.g. from pyowm)
+ @cached(cache=TTLCache(maxsize=1024, ttl=600))
+ def get_weather(place):
+ return owm.weather_at_place(place).get_weather()
+
+For the purpose of this module, a *cache* is a mutable_ mapping_ of a
+fixed maximum size. When the cache is full, i.e. when adding another
+item would exceed its maximum size, the cache must choose which
+item(s) to discard based on a suitable `cache algorithm`_.
+
+This module provides multiple cache classes based on different cache
+algorithms, as well as decorators for easily memoizing function and
+method calls.
+
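+A cache thus behaves like an ordinary dictionary that evicts entries once
+full. A minimal illustration (not part of the original examples):
+
+.. code-block:: python
+
+   from cachetools import LRUCache
+
+   cache = LRUCache(maxsize=2)
+   cache['a'] = 1
+   cache['b'] = 2
+   cache['a']          # touch 'a' so 'b' becomes least recently used
+   cache['c'] = 3      # cache is full, so 'b' is evicted
+   assert 'a' in cache and 'b' not in cache
+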
+
+Installation
+------------------------------------------------------------------------
+
+cachetools is available from PyPI_ and can be installed by running::
+
+ pip install cachetools
+
+Typing stubs for this package are provided by typeshed_ and can be
+installed by running::
+
+ pip install types-cachetools
+
+
+Project Resources
+------------------------------------------------------------------------
+
+- `Documentation`_
+- `Issue tracker`_
+- `Source code`_
+- `Change log`_
+
+
+Related Projects
+------------------------------------------------------------------------
+
+- asyncache_: Helpers to use cachetools with async functions
+- CacheToolsUtils_: Cachetools Utilities
+- `kids.cache`_: Kids caching library
+- shelved-cache_: Persistent cache for Python cachetools
+
+
+License
+------------------------------------------------------------------------
+
+Copyright (c) 2014-2023 Thomas Kemmer.
+
+Licensed under the `MIT License`_.
+
+
+.. _@lru_cache: https://docs.python.org/3/library/functools.html#functools.lru_cache
+.. _mutable: https://docs.python.org/dev/glossary.html#term-mutable
+.. _mapping: https://docs.python.org/dev/glossary.html#term-mapping
+.. _cache algorithm: https://en.wikipedia.org/wiki/Cache_algorithms
+
+.. _PyPI: https://pypi.org/project/cachetools/
+.. _typeshed: https://github.com/python/typeshed/
+.. _Documentation: https://cachetools.readthedocs.io/
+.. _Issue tracker: https://github.com/tkem/cachetools/issues/
+.. _Source code: https://github.com/tkem/cachetools/
+.. _Change log: https://github.com/tkem/cachetools/blob/master/CHANGELOG.rst
+.. _MIT License: https://raw.github.com/tkem/cachetools/master/LICENSE
+
+.. _asyncache: https://pypi.org/project/asyncache/
+.. _CacheToolsUtils: https://pypi.org/project/CacheToolsUtils/
+.. _kids.cache: https://pypi.org/project/kids.cache/
+.. _shelved-cache: https://pypi.org/project/shelved-cache/
diff --git a/Lib/site-packages/cachetools-5.3.2.dist-info/RECORD b/Lib/site-packages/cachetools-5.3.2.dist-info/RECORD
new file mode 100644
index 0000000..b7c6a5d
--- /dev/null
+++ b/Lib/site-packages/cachetools-5.3.2.dist-info/RECORD
@@ -0,0 +1,13 @@
+cachetools-5.3.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cachetools-5.3.2.dist-info/LICENSE,sha256=diYME3Cn1B1frHGifXgfOt1dckmt-7-pMIRtLZ5H29U,1085
+cachetools-5.3.2.dist-info/METADATA,sha256=t9PO42yhtkE6mN1RhZv61nrqOwSHol_bBL5FL8sr8Jc,5238
+cachetools-5.3.2.dist-info/RECORD,,
+cachetools-5.3.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cachetools-5.3.2.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
+cachetools-5.3.2.dist-info/top_level.txt,sha256=ai2FH78TGwoBcCgVfoqbzk5IQCtnDukdSs4zKuVPvDs,11
+cachetools/__init__.py,sha256=qXeCCrX_Y0b8i4WDG1ItxgKE1m1RP1kJM1DgKQbxO_E,24981
+cachetools/__pycache__/__init__.cpython-312.pyc,,
+cachetools/__pycache__/func.cpython-312.pyc,,
+cachetools/__pycache__/keys.cpython-312.pyc,,
+cachetools/func.py,sha256=KxCw7akhw-WkltvsfgzkL4XFGxd54srqroKzV3ZP2OM,3616
+cachetools/keys.py,sha256=d-cpW252E_uV50ySlw13IevdNQnSc0MfiMViImQktRI,1613
diff --git a/Lib/site-packages/cachetools-5.3.2.dist-info/REQUESTED b/Lib/site-packages/cachetools-5.3.2.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/cachetools-5.3.2.dist-info/WHEEL b/Lib/site-packages/cachetools-5.3.2.dist-info/WHEEL
new file mode 100644
index 0000000..7e68873
--- /dev/null
+++ b/Lib/site-packages/cachetools-5.3.2.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.2)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/Lib/site-packages/cachetools-5.3.2.dist-info/top_level.txt b/Lib/site-packages/cachetools-5.3.2.dist-info/top_level.txt
new file mode 100644
index 0000000..50d1408
--- /dev/null
+++ b/Lib/site-packages/cachetools-5.3.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+cachetools
diff --git a/Lib/site-packages/cachetools/__init__.py b/Lib/site-packages/cachetools/__init__.py
new file mode 100644
index 0000000..61c12be
--- /dev/null
+++ b/Lib/site-packages/cachetools/__init__.py
@@ -0,0 +1,844 @@
+"""Extensible memoizing collections and decorators."""
+
+__all__ = (
+ "Cache",
+ "FIFOCache",
+ "LFUCache",
+ "LRUCache",
+ "MRUCache",
+ "RRCache",
+ "TLRUCache",
+ "TTLCache",
+ "cached",
+ "cachedmethod",
+)
+
+__version__ = "5.3.2"
+
+import collections
+import collections.abc
+import functools
+import heapq
+import random
+import time
+
+from . import keys
+
+
+class _DefaultSize:
+
+ __slots__ = ()
+
+ def __getitem__(self, _):
+ return 1
+
+ def __setitem__(self, _, value):
+ assert value == 1
+
+ def pop(self, _):
+ return 1
+
+
+class Cache(collections.abc.MutableMapping):
+ """Mutable mapping to serve as a simple cache or cache base class."""
+
+ __marker = object()
+
+ __size = _DefaultSize()
+
+ def __init__(self, maxsize, getsizeof=None):
+ if getsizeof:
+ self.getsizeof = getsizeof
+ if self.getsizeof is not Cache.getsizeof:
+ self.__size = dict()
+ self.__data = dict()
+ self.__currsize = 0
+ self.__maxsize = maxsize
+
+ def __repr__(self):
+ return "%s(%s, maxsize=%r, currsize=%r)" % (
+ self.__class__.__name__,
+ repr(self.__data),
+ self.__maxsize,
+ self.__currsize,
+ )
+
+ def __getitem__(self, key):
+ try:
+ return self.__data[key]
+ except KeyError:
+ return self.__missing__(key)
+
+ def __setitem__(self, key, value):
+ maxsize = self.__maxsize
+ size = self.getsizeof(value)
+ if size > maxsize:
+ raise ValueError("value too large")
+ if key not in self.__data or self.__size[key] < size:
+ while self.__currsize + size > maxsize:
+ self.popitem()
+ if key in self.__data:
+ diffsize = size - self.__size[key]
+ else:
+ diffsize = size
+ self.__data[key] = value
+ self.__size[key] = size
+ self.__currsize += diffsize
+
+ def __delitem__(self, key):
+ size = self.__size.pop(key)
+ del self.__data[key]
+ self.__currsize -= size
+
+ def __contains__(self, key):
+ return key in self.__data
+
+ def __missing__(self, key):
+ raise KeyError(key)
+
+ def __iter__(self):
+ return iter(self.__data)
+
+ def __len__(self):
+ return len(self.__data)
+
+ def get(self, key, default=None):
+ if key in self:
+ return self[key]
+ else:
+ return default
+
+ def pop(self, key, default=__marker):
+ if key in self:
+ value = self[key]
+ del self[key]
+ elif default is self.__marker:
+ raise KeyError(key)
+ else:
+ value = default
+ return value
+
+ def setdefault(self, key, default=None):
+ if key in self:
+ value = self[key]
+ else:
+ self[key] = value = default
+ return value
+
+ @property
+ def maxsize(self):
+ """The maximum size of the cache."""
+ return self.__maxsize
+
+ @property
+ def currsize(self):
+ """The current size of the cache."""
+ return self.__currsize
+
+ @staticmethod
+ def getsizeof(value):
+ """Return the size of a cache element's value."""
+ return 1
+
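+# Illustrative only (hypothetical names): a custom ``getsizeof`` makes the
+# cache account for value sizes instead of item counts.
+#
+#     >>> c = Cache(maxsize=10, getsizeof=len)
+#     >>> c['x'] = 'abcdef'
+#     >>> c.currsize
+#     6
+#     >>> c['y'] = 'x' * 11
+#     Traceback (most recent call last):
+#         ...
+#     ValueError: value too large
+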
+
+class FIFOCache(Cache):
+ """First In First Out (FIFO) cache implementation."""
+
+ def __init__(self, maxsize, getsizeof=None):
+ Cache.__init__(self, maxsize, getsizeof)
+ self.__order = collections.OrderedDict()
+
+ def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
+ cache_setitem(self, key, value)
+ try:
+ self.__order.move_to_end(key)
+ except KeyError:
+ self.__order[key] = None
+
+ def __delitem__(self, key, cache_delitem=Cache.__delitem__):
+ cache_delitem(self, key)
+ del self.__order[key]
+
+ def popitem(self):
+ """Remove and return the `(key, value)` pair first inserted."""
+ try:
+ key = next(iter(self.__order))
+ except StopIteration:
+ raise KeyError("%s is empty" % type(self).__name__) from None
+ else:
+ return (key, self.pop(key))
+
+
+class LFUCache(Cache):
+ """Least Frequently Used (LFU) cache implementation."""
+
+ def __init__(self, maxsize, getsizeof=None):
+ Cache.__init__(self, maxsize, getsizeof)
+ self.__counter = collections.Counter()
+
+ def __getitem__(self, key, cache_getitem=Cache.__getitem__):
+ value = cache_getitem(self, key)
+ if key in self: # __missing__ may not store item
+ self.__counter[key] -= 1
+ return value
+
+ def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
+ cache_setitem(self, key, value)
+ self.__counter[key] -= 1
+
+ def __delitem__(self, key, cache_delitem=Cache.__delitem__):
+ cache_delitem(self, key)
+ del self.__counter[key]
+
+ def popitem(self):
+ """Remove and return the `(key, value)` pair least frequently used."""
+ try:
+ ((key, _),) = self.__counter.most_common(1)
+ except ValueError:
+ raise KeyError("%s is empty" % type(self).__name__) from None
+ else:
+ return (key, self.pop(key))
+
+
+class LRUCache(Cache):
+ """Least Recently Used (LRU) cache implementation."""
+
+ def __init__(self, maxsize, getsizeof=None):
+ Cache.__init__(self, maxsize, getsizeof)
+ self.__order = collections.OrderedDict()
+
+ def __getitem__(self, key, cache_getitem=Cache.__getitem__):
+ value = cache_getitem(self, key)
+ if key in self: # __missing__ may not store item
+ self.__update(key)
+ return value
+
+ def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
+ cache_setitem(self, key, value)
+ self.__update(key)
+
+ def __delitem__(self, key, cache_delitem=Cache.__delitem__):
+ cache_delitem(self, key)
+ del self.__order[key]
+
+ def popitem(self):
+ """Remove and return the `(key, value)` pair least recently used."""
+ try:
+ key = next(iter(self.__order))
+ except StopIteration:
+ raise KeyError("%s is empty" % type(self).__name__) from None
+ else:
+ return (key, self.pop(key))
+
+ def __update(self, key):
+ try:
+ self.__order.move_to_end(key)
+ except KeyError:
+ self.__order[key] = None
+
+
+class MRUCache(Cache):
+ """Most Recently Used (MRU) cache implementation."""
+
+ def __init__(self, maxsize, getsizeof=None):
+ Cache.__init__(self, maxsize, getsizeof)
+ self.__order = collections.OrderedDict()
+
+ def __getitem__(self, key, cache_getitem=Cache.__getitem__):
+ value = cache_getitem(self, key)
+ if key in self: # __missing__ may not store item
+ self.__update(key)
+ return value
+
+ def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
+ cache_setitem(self, key, value)
+ self.__update(key)
+
+ def __delitem__(self, key, cache_delitem=Cache.__delitem__):
+ cache_delitem(self, key)
+ del self.__order[key]
+
+ def popitem(self):
+ """Remove and return the `(key, value)` pair most recently used."""
+ try:
+ key = next(iter(self.__order))
+ except StopIteration:
+ raise KeyError("%s is empty" % type(self).__name__) from None
+ else:
+ return (key, self.pop(key))
+
+ def __update(self, key):
+ try:
+ self.__order.move_to_end(key, last=False)
+ except KeyError:
+ self.__order[key] = None
+
+
+class RRCache(Cache):
+ """Random Replacement (RR) cache implementation."""
+
+ def __init__(self, maxsize, choice=random.choice, getsizeof=None):
+ Cache.__init__(self, maxsize, getsizeof)
+ self.__choice = choice
+
+ @property
+ def choice(self):
+ """The `choice` function used by the cache."""
+ return self.__choice
+
+ def popitem(self):
+ """Remove and return a random `(key, value)` pair."""
+ try:
+ key = self.__choice(list(self))
+ except IndexError:
+ raise KeyError("%s is empty" % type(self).__name__) from None
+ else:
+ return (key, self.pop(key))
+
+
+class _TimedCache(Cache):
+ """Base class for time aware cache implementations."""
+
+ class _Timer:
+ def __init__(self, timer):
+ self.__timer = timer
+ self.__nesting = 0
+
+ def __call__(self):
+ if self.__nesting == 0:
+ return self.__timer()
+ else:
+ return self.__time
+
+ def __enter__(self):
+ if self.__nesting == 0:
+ self.__time = time = self.__timer()
+ else:
+ time = self.__time
+ self.__nesting += 1
+ return time
+
+ def __exit__(self, *exc):
+ self.__nesting -= 1
+
+ def __reduce__(self):
+ return _TimedCache._Timer, (self.__timer,)
+
+ def __getattr__(self, name):
+ return getattr(self.__timer, name)
+
+ def __init__(self, maxsize, timer=time.monotonic, getsizeof=None):
+ Cache.__init__(self, maxsize, getsizeof)
+ self.__timer = _TimedCache._Timer(timer)
+
+ def __repr__(self, cache_repr=Cache.__repr__):
+ with self.__timer as time:
+ self.expire(time)
+ return cache_repr(self)
+
+ def __len__(self, cache_len=Cache.__len__):
+ with self.__timer as time:
+ self.expire(time)
+ return cache_len(self)
+
+ @property
+ def currsize(self):
+ with self.__timer as time:
+ self.expire(time)
+ return super().currsize
+
+ @property
+ def timer(self):
+ """The timer function used by the cache."""
+ return self.__timer
+
+ def clear(self):
+ with self.__timer as time:
+ self.expire(time)
+ Cache.clear(self)
+
+ def get(self, *args, **kwargs):
+ with self.__timer:
+ return Cache.get(self, *args, **kwargs)
+
+ def pop(self, *args, **kwargs):
+ with self.__timer:
+ return Cache.pop(self, *args, **kwargs)
+
+ def setdefault(self, *args, **kwargs):
+ with self.__timer:
+ return Cache.setdefault(self, *args, **kwargs)
+
+
+class TTLCache(_TimedCache):
+ """LRU Cache implementation with per-item time-to-live (TTL) value."""
+
+ class _Link:
+
+ __slots__ = ("key", "expires", "next", "prev")
+
+ def __init__(self, key=None, expires=None):
+ self.key = key
+ self.expires = expires
+
+ def __reduce__(self):
+ return TTLCache._Link, (self.key, self.expires)
+
+ def unlink(self):
+ next = self.next
+ prev = self.prev
+ prev.next = next
+ next.prev = prev
+
+ def __init__(self, maxsize, ttl, timer=time.monotonic, getsizeof=None):
+ _TimedCache.__init__(self, maxsize, timer, getsizeof)
+ self.__root = root = TTLCache._Link()
+ root.prev = root.next = root
+ self.__links = collections.OrderedDict()
+ self.__ttl = ttl
+
+ def __contains__(self, key):
+ try:
+ link = self.__links[key] # no reordering
+ except KeyError:
+ return False
+ else:
+ return self.timer() < link.expires
+
+ def __getitem__(self, key, cache_getitem=Cache.__getitem__):
+ try:
+ link = self.__getlink(key)
+ except KeyError:
+ expired = False
+ else:
+ expired = not (self.timer() < link.expires)
+ if expired:
+ return self.__missing__(key)
+ else:
+ return cache_getitem(self, key)
+
+ def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
+ with self.timer as time:
+ self.expire(time)
+ cache_setitem(self, key, value)
+ try:
+ link = self.__getlink(key)
+ except KeyError:
+ self.__links[key] = link = TTLCache._Link(key)
+ else:
+ link.unlink()
+ link.expires = time + self.__ttl
+ link.next = root = self.__root
+ link.prev = prev = root.prev
+ prev.next = root.prev = link
+
+ def __delitem__(self, key, cache_delitem=Cache.__delitem__):
+ cache_delitem(self, key)
+ link = self.__links.pop(key)
+ link.unlink()
+ if not (self.timer() < link.expires):
+ raise KeyError(key)
+
+ def __iter__(self):
+ root = self.__root
+ curr = root.next
+ while curr is not root:
+ # "freeze" time for iterator access
+ with self.timer as time:
+ if time < curr.expires:
+ yield curr.key
+ curr = curr.next
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+ root = self.__root
+ root.prev = root.next = root
+ for link in sorted(self.__links.values(), key=lambda obj: obj.expires):
+ link.next = root
+ link.prev = prev = root.prev
+ prev.next = root.prev = link
+ self.expire(self.timer())
+
+ @property
+ def ttl(self):
+ """The time-to-live value of the cache's items."""
+ return self.__ttl
+
+ def expire(self, time=None):
+ """Remove expired items from the cache."""
+ if time is None:
+ time = self.timer()
+ root = self.__root
+ curr = root.next
+ links = self.__links
+ cache_delitem = Cache.__delitem__
+ while curr is not root and not (time < curr.expires):
+ cache_delitem(self, curr.key)
+ del links[curr.key]
+ next = curr.next
+ curr.unlink()
+ curr = next
+
+ def popitem(self):
+ """Remove and return the `(key, value)` pair least recently used that
+ has not already expired.
+
+ """
+ with self.timer as time:
+ self.expire(time)
+ try:
+ key = next(iter(self.__links))
+ except StopIteration:
+ raise KeyError("%s is empty" % type(self).__name__) from None
+ else:
+ return (key, self.pop(key))
+
+ def __getlink(self, key):
+ value = self.__links[key]
+ self.__links.move_to_end(key)
+ return value
+
+
+class TLRUCache(_TimedCache):
+ """Time aware Least Recently Used (TLRU) cache implementation."""
+
+ @functools.total_ordering
+ class _Item:
+
+ __slots__ = ("key", "expires", "removed")
+
+ def __init__(self, key=None, expires=None):
+ self.key = key
+ self.expires = expires
+ self.removed = False
+
+ def __lt__(self, other):
+ return self.expires < other.expires
+
+ def __init__(self, maxsize, ttu, timer=time.monotonic, getsizeof=None):
+ _TimedCache.__init__(self, maxsize, timer, getsizeof)
+ self.__items = collections.OrderedDict()
+ self.__order = []
+ self.__ttu = ttu
+
+ def __contains__(self, key):
+ try:
+ item = self.__items[key] # no reordering
+ except KeyError:
+ return False
+ else:
+ return self.timer() < item.expires
+
+ def __getitem__(self, key, cache_getitem=Cache.__getitem__):
+ try:
+ item = self.__getitem(key)
+ except KeyError:
+ expired = False
+ else:
+ expired = not (self.timer() < item.expires)
+ if expired:
+ return self.__missing__(key)
+ else:
+ return cache_getitem(self, key)
+
+ def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
+ with self.timer as time:
+ expires = self.__ttu(key, value, time)
+ if not (time < expires):
+ return # skip expired items
+ self.expire(time)
+ cache_setitem(self, key, value)
+ # removing an existing item would break the heap structure, so
+ # only mark it as removed for now
+ try:
+ self.__getitem(key).removed = True
+ except KeyError:
+ pass
+ self.__items[key] = item = TLRUCache._Item(key, expires)
+ heapq.heappush(self.__order, item)
+
+ def __delitem__(self, key, cache_delitem=Cache.__delitem__):
+ with self.timer as time:
+ # no self.expire() for performance reasons, e.g. self.clear() [#67]
+ cache_delitem(self, key)
+ item = self.__items.pop(key)
+ item.removed = True
+ if not (time < item.expires):
+ raise KeyError(key)
+
+ def __iter__(self):
+ for curr in self.__order:
+ # "freeze" time for iterator access
+ with self.timer as time:
+ if time < curr.expires and not curr.removed:
+ yield curr.key
+
+ @property
+ def ttu(self):
+ """The local time-to-use function used by the cache."""
+ return self.__ttu
+
+ def expire(self, time=None):
+ """Remove expired items from the cache."""
+ if time is None:
+ time = self.timer()
+ items = self.__items
+ order = self.__order
+ # clean up the heap if too many items are marked as removed
+ if len(order) > len(items) * 2:
+ self.__order = order = [item for item in order if not item.removed]
+ heapq.heapify(order)
+ cache_delitem = Cache.__delitem__
+ while order and (order[0].removed or not (time < order[0].expires)):
+ item = heapq.heappop(order)
+ if not item.removed:
+ cache_delitem(self, item.key)
+ del items[item.key]
+
+ def popitem(self):
+ """Remove and return the `(key, value)` pair least recently used that
+ has not already expired.
+
+ """
+ with self.timer as time:
+ self.expire(time)
+ try:
+ key = next(iter(self.__items))
+ except StopIteration:
+ raise KeyError("%s is empty" % self.__class__.__name__) from None
+ else:
+ return (key, self.pop(key))
+
+ def __getitem(self, key):
+ value = self.__items[key]
+ self.__items.move_to_end(key)
+ return value
+
+
+_CacheInfo = collections.namedtuple(
+ "CacheInfo", ["hits", "misses", "maxsize", "currsize"]
+)
+
+
+def cached(cache, key=keys.hashkey, lock=None, info=False):
+ """Decorator to wrap a function with a memoizing callable that saves
+ results in a cache.
+
+ """
+
+ def decorator(func):
+ if info:
+ hits = misses = 0
+
+ if isinstance(cache, Cache):
+
+ def getinfo():
+ nonlocal hits, misses
+ return _CacheInfo(hits, misses, cache.maxsize, cache.currsize)
+
+ elif isinstance(cache, collections.abc.Mapping):
+
+ def getinfo():
+ nonlocal hits, misses
+ return _CacheInfo(hits, misses, None, len(cache))
+
+ else:
+
+ def getinfo():
+ nonlocal hits, misses
+ return _CacheInfo(hits, misses, 0, 0)
+
+ if cache is None:
+
+ def wrapper(*args, **kwargs):
+ nonlocal misses
+ misses += 1
+ return func(*args, **kwargs)
+
+ def cache_clear():
+ nonlocal hits, misses
+ hits = misses = 0
+
+ cache_info = getinfo
+
+ elif lock is None:
+
+ def wrapper(*args, **kwargs):
+ nonlocal hits, misses
+ k = key(*args, **kwargs)
+ try:
+ result = cache[k]
+ hits += 1
+ return result
+ except KeyError:
+ misses += 1
+ v = func(*args, **kwargs)
+ try:
+ cache[k] = v
+ except ValueError:
+ pass # value too large
+ return v
+
+ def cache_clear():
+ nonlocal hits, misses
+ cache.clear()
+ hits = misses = 0
+
+ cache_info = getinfo
+
+ else:
+
+ def wrapper(*args, **kwargs):
+ nonlocal hits, misses
+ k = key(*args, **kwargs)
+ try:
+ with lock:
+ result = cache[k]
+ hits += 1
+ return result
+ except KeyError:
+ with lock:
+ misses += 1
+ v = func(*args, **kwargs)
+ # in case of a race, prefer the item already in the cache
+ try:
+ with lock:
+ return cache.setdefault(k, v)
+ except ValueError:
+ return v # value too large
+
+ def cache_clear():
+ nonlocal hits, misses
+ with lock:
+ cache.clear()
+ hits = misses = 0
+
+ def cache_info():
+ with lock:
+ return getinfo()
+
+ else:
+ if cache is None:
+
+ def wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+
+ def cache_clear():
+ pass
+
+ elif lock is None:
+
+ def wrapper(*args, **kwargs):
+ k = key(*args, **kwargs)
+ try:
+ return cache[k]
+ except KeyError:
+ pass # key not found
+ v = func(*args, **kwargs)
+ try:
+ cache[k] = v
+ except ValueError:
+ pass # value too large
+ return v
+
+ def cache_clear():
+ cache.clear()
+
+ else:
+
+ def wrapper(*args, **kwargs):
+ k = key(*args, **kwargs)
+ try:
+ with lock:
+ return cache[k]
+ except KeyError:
+ pass # key not found
+ v = func(*args, **kwargs)
+ # in case of a race, prefer the item already in the cache
+ try:
+ with lock:
+ return cache.setdefault(k, v)
+ except ValueError:
+ return v # value too large
+
+ def cache_clear():
+ with lock:
+ cache.clear()
+
+ cache_info = None
+
+ wrapper.cache = cache
+ wrapper.cache_key = key
+ wrapper.cache_lock = lock
+ wrapper.cache_clear = cache_clear
+ wrapper.cache_info = cache_info
+
+ return functools.update_wrapper(wrapper, func)
+
+ return decorator
+
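+# Illustrative only (hypothetical names): memoizing through a shared
+# TTLCache guarded by a lock for thread safety.
+#
+#     >>> import threading
+#     >>> @cached(cache=TTLCache(maxsize=128, ttl=60), lock=threading.Lock())
+#     ... def lookup(name):
+#     ...     return name.upper()
+#     >>> lookup('spam')
+#     'SPAM'
+#     >>> lookup.cache_clear()  # wrappers expose cache helpers
+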
+
+def cachedmethod(cache, key=keys.methodkey, lock=None):
+ """Decorator to wrap a class or instance method with a memoizing
+ callable that saves results in a cache.
+
+ """
+
+ def decorator(method):
+ if lock is None:
+
+ def wrapper(self, *args, **kwargs):
+ c = cache(self)
+ if c is None:
+ return method(self, *args, **kwargs)
+ k = key(self, *args, **kwargs)
+ try:
+ return c[k]
+ except KeyError:
+ pass # key not found
+ v = method(self, *args, **kwargs)
+ try:
+ c[k] = v
+ except ValueError:
+ pass # value too large
+ return v
+
+ def clear(self):
+ c = cache(self)
+ if c is not None:
+ c.clear()
+
+ else:
+
+ def wrapper(self, *args, **kwargs):
+ c = cache(self)
+ if c is None:
+ return method(self, *args, **kwargs)
+ k = key(self, *args, **kwargs)
+ try:
+ with lock(self):
+ return c[k]
+ except KeyError:
+ pass # key not found
+ v = method(self, *args, **kwargs)
+ # in case of a race, prefer the item already in the cache
+ try:
+ with lock(self):
+ return c.setdefault(k, v)
+ except ValueError:
+ return v # value too large
+
+ def clear(self):
+ c = cache(self)
+ if c is not None:
+ with lock(self):
+ c.clear()
+
+ wrapper.cache = cache
+ wrapper.cache_key = key
+ wrapper.cache_lock = lock
+ wrapper.cache_clear = clear
+
+ return functools.update_wrapper(wrapper, method)
+
+ return decorator
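+
+
+# Minimal demo, illustrative only (``Squarer`` is a hypothetical name):
+# cachedmethod resolves the cache through a callable, typically
+# operator.attrgetter, so each instance can carry its own cache.
+if __name__ == "__main__":
+    import operator
+
+    class Squarer:
+        def __init__(self):
+            self.cache = LRUCache(maxsize=16)
+
+        @cachedmethod(operator.attrgetter("cache"))
+        def square(self, n):
+            return n * n
+
+    s = Squarer()
+    assert s.square(4) == 16
+    assert s.cache[keys.methodkey(s, 4)] == 16  # stored under the key fn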
diff --git a/Lib/site-packages/cachetools/func.py b/Lib/site-packages/cachetools/func.py
new file mode 100644
index 0000000..0c09a60
--- /dev/null
+++ b/Lib/site-packages/cachetools/func.py
@@ -0,0 +1,117 @@
+"""`functools.lru_cache` compatible memoizing function decorators."""
+
+__all__ = ("fifo_cache", "lfu_cache", "lru_cache", "mru_cache", "rr_cache", "ttl_cache")
+
+import math
+import random
+import time
+
+try:
+ from threading import RLock
+except ImportError: # pragma: no cover
+ from dummy_threading import RLock
+
+from . import FIFOCache, LFUCache, LRUCache, MRUCache, RRCache, TTLCache
+from . import cached
+from . import keys
+
+
+class _UnboundTTLCache(TTLCache):
+ def __init__(self, ttl, timer):
+ TTLCache.__init__(self, math.inf, ttl, timer)
+
+ @property
+ def maxsize(self):
+ return None
+
+
+def _cache(cache, maxsize, typed):
+ def decorator(func):
+ key = keys.typedkey if typed else keys.hashkey
+ wrapper = cached(cache=cache, key=key, lock=RLock(), info=True)(func)
+ wrapper.cache_parameters = lambda: {"maxsize": maxsize, "typed": typed}
+ return wrapper
+
+ return decorator
+
+
+def fifo_cache(maxsize=128, typed=False):
+ """Decorator to wrap a function with a memoizing callable that saves
+ up to `maxsize` results based on a First In First Out (FIFO)
+ algorithm.
+
+ """
+ if maxsize is None:
+ return _cache({}, None, typed)
+ elif callable(maxsize):
+ return _cache(FIFOCache(128), 128, typed)(maxsize)
+ else:
+ return _cache(FIFOCache(maxsize), maxsize, typed)
+
+
+def lfu_cache(maxsize=128, typed=False):
+ """Decorator to wrap a function with a memoizing callable that saves
+ up to `maxsize` results based on a Least Frequently Used (LFU)
+ algorithm.
+
+ """
+ if maxsize is None:
+ return _cache({}, None, typed)
+ elif callable(maxsize):
+ return _cache(LFUCache(128), 128, typed)(maxsize)
+ else:
+ return _cache(LFUCache(maxsize), maxsize, typed)
+
+
+def lru_cache(maxsize=128, typed=False):
+ """Decorator to wrap a function with a memoizing callable that saves
+ up to `maxsize` results based on a Least Recently Used (LRU)
+ algorithm.
+
+ """
+ if maxsize is None:
+ return _cache({}, None, typed)
+ elif callable(maxsize):
+ return _cache(LRUCache(128), 128, typed)(maxsize)
+ else:
+ return _cache(LRUCache(maxsize), maxsize, typed)
+
+
+def mru_cache(maxsize=128, typed=False):
+ """Decorator to wrap a function with a memoizing callable that saves
+ up to `maxsize` results based on a Most Recently Used (MRU)
+ algorithm.
+ """
+ if maxsize is None:
+ return _cache({}, None, typed)
+ elif callable(maxsize):
+ return _cache(MRUCache(128), 128, typed)(maxsize)
+ else:
+ return _cache(MRUCache(maxsize), maxsize, typed)
+
+
+def rr_cache(maxsize=128, choice=random.choice, typed=False):
+ """Decorator to wrap a function with a memoizing callable that saves
+ up to `maxsize` results based on a Random Replacement (RR)
+ algorithm.
+
+ """
+ if maxsize is None:
+ return _cache({}, None, typed)
+ elif callable(maxsize):
+ return _cache(RRCache(128, choice), 128, typed)(maxsize)
+ else:
+ return _cache(RRCache(maxsize, choice), maxsize, typed)
+
+
+def ttl_cache(maxsize=128, ttl=600, timer=time.monotonic, typed=False):
+ """Decorator to wrap a function with a memoizing callable that saves
+ up to `maxsize` results based on a Least Recently Used (LRU)
+ algorithm with a per-item time-to-live (TTL) value.
+ """
+ if maxsize is None:
+ return _cache(_UnboundTTLCache(ttl, timer), None, typed)
+ elif callable(maxsize):
+ return _cache(TTLCache(128, ttl, timer), 128, typed)(maxsize)
+ else:
+ return _cache(TTLCache(maxsize, ttl, timer), maxsize, typed)
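
A quick check of the decorators above (function name and timings chosen arbitrarily): `ttl_cache` behaves like `functools.lru_cache` until an entry's TTL lapses, and because `_cache` passes `info=True` to `cached`, the wrapper also exposes `cache_info()`.

```python
import time

from cachetools.func import ttl_cache


@ttl_cache(maxsize=32, ttl=0.5)
def stamp():
    return time.monotonic()


first = stamp()
assert stamp() == first    # inside the TTL window: cached value returned
time.sleep(0.6)
assert stamp() != first    # entry expired, the body is re-evaluated
print(stamp.cache_info())  # hit/miss counters provided via info=True
```
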
diff --git a/Lib/site-packages/cachetools/keys.py b/Lib/site-packages/cachetools/keys.py
new file mode 100644
index 0000000..f2feb41
--- /dev/null
+++ b/Lib/site-packages/cachetools/keys.py
@@ -0,0 +1,57 @@
+"""Key functions for memoizing decorators."""
+
+__all__ = ("hashkey", "methodkey", "typedkey")
+
+
+class _HashedTuple(tuple):
+ """A tuple that ensures that hash() will be called no more than once
+ per element, since cache decorators will hash the key multiple
+ times on a cache miss. See also _HashedSeq in the standard
+ library functools implementation.
+
+ """
+
+ __hashvalue = None
+
+ def __hash__(self, hash=tuple.__hash__):
+ hashvalue = self.__hashvalue
+ if hashvalue is None:
+ self.__hashvalue = hashvalue = hash(self)
+ return hashvalue
+
+ def __add__(self, other, add=tuple.__add__):
+ return _HashedTuple(add(self, other))
+
+ def __radd__(self, other, add=tuple.__add__):
+ return _HashedTuple(add(other, self))
+
+ def __getstate__(self):
+ return {}
+
+
+# used for separating keyword arguments; we do not use an object
+# instance here so identity is preserved when pickling/unpickling
+_kwmark = (_HashedTuple,)
+
+
+def hashkey(*args, **kwargs):
+ """Return a cache key for the specified hashable arguments."""
+
+ if kwargs:
+ return _HashedTuple(args + sum(sorted(kwargs.items()), _kwmark))
+ else:
+ return _HashedTuple(args)
+
+
+def methodkey(self, *args, **kwargs):
+ """Return a cache key for use with cached methods."""
+ return hashkey(*args, **kwargs)
+
+
+def typedkey(*args, **kwargs):
+ """Return a typed cache key for the specified hashable arguments."""
+
+ key = hashkey(*args, **kwargs)
+ key += tuple(type(v) for v in args)
+ key += tuple(type(v) for _, v in sorted(kwargs.items()))
+ return key
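
The behaviour of these key functions is easiest to see side by side; a short sketch with arbitrary values:

```python
from cachetools.keys import hashkey, typedkey

assert hashkey(1, 2) == hashkey(1, 2)    # deterministic tuple-based keys
assert hashkey(1) == hashkey(1.0)        # 1 == 1.0, so the keys collide
assert typedkey(1) != typedkey(1.0)      # appended types keep them distinct
assert hashkey(1, a=2) != hashkey(1, 2)  # _kwmark separates kwargs from args
```
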
diff --git a/Lib/site-packages/certifi-2024.2.2.dist-info/INSTALLER b/Lib/site-packages/certifi-2024.2.2.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/certifi-2024.2.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/certifi-2024.2.2.dist-info/LICENSE b/Lib/site-packages/certifi-2024.2.2.dist-info/LICENSE
new file mode 100644
index 0000000..62b076c
--- /dev/null
+++ b/Lib/site-packages/certifi-2024.2.2.dist-info/LICENSE
@@ -0,0 +1,20 @@
+This package contains a modified version of ca-bundle.crt:
+
+ca-bundle.crt -- Bundle of CA Root Certificates
+
+This is a bundle of X.509 certificates of public Certificate Authorities
+(CA). These were automatically extracted from Mozilla's root certificates
+file (certdata.txt). This file can be found in the mozilla source tree:
+https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt
+It contains the certificates in PEM format and therefore
+can be directly used with curl / libcurl / php_curl, or with
+an Apache+mod_ssl webserver for SSL client authentication.
+Just configure this file as the SSLCACertificateFile.
+
+***** BEGIN LICENSE BLOCK *****
+This Source Code Form is subject to the terms of the Mozilla Public License,
+v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
+one at http://mozilla.org/MPL/2.0/.
+
+***** END LICENSE BLOCK *****
+@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
diff --git a/Lib/site-packages/certifi-2024.2.2.dist-info/METADATA b/Lib/site-packages/certifi-2024.2.2.dist-info/METADATA
new file mode 100644
index 0000000..c688a62
--- /dev/null
+++ b/Lib/site-packages/certifi-2024.2.2.dist-info/METADATA
@@ -0,0 +1,66 @@
+Metadata-Version: 2.1
+Name: certifi
+Version: 2024.2.2
+Summary: Python package for providing Mozilla's CA Bundle.
+Home-page: https://github.com/certifi/python-certifi
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.com
+License: MPL-2.0
+Project-URL: Source, https://github.com/certifi/python-certifi
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Requires-Python: >=3.6
+License-File: LICENSE
+
+Certifi: Python SSL Certificates
+================================
+
+Certifi provides Mozilla's carefully curated collection of Root Certificates for
+validating the trustworthiness of SSL certificates while verifying the identity
+of TLS hosts. It has been extracted from the `Requests`_ project.
+
+Installation
+------------
+
+``certifi`` is available on PyPI. Simply install it with ``pip``::
+
+ $ pip install certifi
+
+Usage
+-----
+
+To reference the installed certificate authority (CA) bundle, you can use
+certifi's built-in function::
+
+ >>> import certifi
+
+ >>> certifi.where()
+ '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
+
+Or from the command line::
+
+ $ python -m certifi
+ /usr/local/lib/python3.7/site-packages/certifi/cacert.pem
+
+Enjoy!
+
+.. _`Requests`: https://requests.readthedocs.io/en/master/
+
+Addition/Removal of Certificates
+--------------------------------
+
+Certifi does not support any addition/removal or other modification of the
+CA trust store content. This project is intended to provide a reliable and
+highly portable root of trust to Python deployments. Look to upstream
+projects for methods to use an alternate trust store.
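
Beyond printing the path, the usual next step is to hand the bundle to an `ssl` context; a minimal sketch using only standard-library calls (the usage comment shows one common consumer):

```python
import ssl

import certifi

# A verifying SSL context rooted in certifi's bundle; pass it wherever a
# `context` argument is accepted, e.g. urllib.request.urlopen(url, context=ctx)
ctx = ssl.create_default_context(cafile=certifi.where())
print(ctx.cert_store_stats())  # counts of loaded CA certificates, among others
```
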
diff --git a/Lib/site-packages/certifi-2024.2.2.dist-info/RECORD b/Lib/site-packages/certifi-2024.2.2.dist-info/RECORD
new file mode 100644
index 0000000..a6f70c1
--- /dev/null
+++ b/Lib/site-packages/certifi-2024.2.2.dist-info/RECORD
@@ -0,0 +1,15 @@
+certifi-2024.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+certifi-2024.2.2.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989
+certifi-2024.2.2.dist-info/METADATA,sha256=1noreLRChpOgeSj0uJT1mehiBl8ngh33Guc7KdvzYYM,2170
+certifi-2024.2.2.dist-info/RECORD,,
+certifi-2024.2.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+certifi-2024.2.2.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+certifi-2024.2.2.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
+certifi/__init__.py,sha256=ljtEx-EmmPpTe2SOd5Kzsujm_lUD0fKJVnE9gzce320,94
+certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
+certifi/__pycache__/__init__.cpython-312.pyc,,
+certifi/__pycache__/__main__.cpython-312.pyc,,
+certifi/__pycache__/core.cpython-312.pyc,,
+certifi/cacert.pem,sha256=ejR8qP724p-CtuR4U1WmY1wX-nVeCUD2XxWqj8e9f5I,292541
+certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426
+certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/Lib/site-packages/certifi-2024.2.2.dist-info/REQUESTED b/Lib/site-packages/certifi-2024.2.2.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/certifi-2024.2.2.dist-info/WHEEL b/Lib/site-packages/certifi-2024.2.2.dist-info/WHEEL
new file mode 100644
index 0000000..98c0d20
--- /dev/null
+++ b/Lib/site-packages/certifi-2024.2.2.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.42.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/Lib/site-packages/certifi-2024.2.2.dist-info/top_level.txt b/Lib/site-packages/certifi-2024.2.2.dist-info/top_level.txt
new file mode 100644
index 0000000..963eac5
--- /dev/null
+++ b/Lib/site-packages/certifi-2024.2.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+certifi
diff --git a/Lib/site-packages/certifi/__init__.py b/Lib/site-packages/certifi/__init__.py
new file mode 100644
index 0000000..1c91f3e
--- /dev/null
+++ b/Lib/site-packages/certifi/__init__.py
@@ -0,0 +1,4 @@
+from .core import contents, where
+
+__all__ = ["contents", "where"]
+__version__ = "2024.02.02"
diff --git a/Lib/site-packages/certifi/__main__.py b/Lib/site-packages/certifi/__main__.py
new file mode 100644
index 0000000..8945b5d
--- /dev/null
+++ b/Lib/site-packages/certifi/__main__.py
@@ -0,0 +1,12 @@
+import argparse
+
+from certifi import contents, where
+
+parser = argparse.ArgumentParser()
+parser.add_argument("-c", "--contents", action="store_true")
+args = parser.parse_args()
+
+if args.contents:
+ print(contents())
+else:
+ print(where())
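
For completeness, a sketch of the programmatic counterparts of this small CLI (the output comments are illustrative):

```python
import certifi

print(certifi.where())    # absolute path to the bundled cacert.pem
pem = certifi.contents()  # the same bundle as one PEM-formatted string
print(pem.count("BEGIN CERTIFICATE"))  # number of roots in the bundle
```
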
diff --git a/Lib/site-packages/certifi/cacert.pem b/Lib/site-packages/certifi/cacert.pem
new file mode 100644
index 0000000..fac3c31
--- /dev/null
+++ b/Lib/site-packages/certifi/cacert.pem
@@ -0,0 +1,4814 @@
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946069240
+# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
+# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
+# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Label: "SwissSign Silver CA - G2"
+# Serial: 5700383053117599563
+# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
+# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
+# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Label: "SecureSign RootCA11"
+# Serial: 1
+# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
+# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
+# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GB CA"
+# Serial: 157768595616588414422159278966750757568
+# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
+# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
+# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
+-----BEGIN CERTIFICATE-----
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
+MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
+Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
+YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
+CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
+b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
+HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
+WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
+1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
+u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
+99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
+M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
+BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
+cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
+ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
+aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Label: "SZAFIR ROOT CA2"
+# Serial: 357043034767186914217277344587386743377558296292
+# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
+# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
+# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
+ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
+NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
+cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
+Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
+QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
+3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
+3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
+3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
+BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
+XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
+AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
+8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
+nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
+oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
+d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
+LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA 2"
+# Serial: 44979900017204383099463764357512596969
+# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
+# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
+# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
+-----BEGIN CERTIFICATE-----
+MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
+gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
+QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
+A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
+OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
+VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
+b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
+DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
+0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
+OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
+fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
+Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
+o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
+sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
+OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
+Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
+adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
+3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
+AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
+F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
+CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
+djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
+WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
+AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
+P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
+b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
+XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
+5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
+DrW5viSP
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
+# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
+# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
+-----BEGIN CERTIFICATE-----
+MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
+DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
+IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
+N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
+dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
+A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
+ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
+QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
+4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
+4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
+ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
+9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
+gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
+Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
+LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
+Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
+ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
+XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
+M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
+9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
+Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
+j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
+X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
+l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
+bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
+pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
+e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
+vm9qp/UsQu0yrbYhnr68
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
+# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
+# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
+-----BEGIN CERTIFICATE-----
+MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
+BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
+bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
+b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
+BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
+YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
+MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
+dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
+QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
+jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
+MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
+C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
+lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
+TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
+# Subject: CN=ISRG Root X1 O=Internet Security Research Group
+# Label: "ISRG Root X1"
+# Serial: 172886928669790476064670243504169061120
+# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
+# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
+# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Label: "AC RAIZ FNMT-RCM"
+# Serial: 485876308206448804701554682760554759
+# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
+# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
+# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
+WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
+BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
+Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
+yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
+BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
+WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
+tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
+374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
+IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
+mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
+wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
+MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
+ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
+UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
+YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
+LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
+nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
+RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
+LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
+77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
+JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
+fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
+1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
+9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
+RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
+uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 1 O=Amazon
+# Subject: CN=Amazon Root CA 1 O=Amazon
+# Label: "Amazon Root CA 1"
+# Serial: 143266978916655856878034712317230054538369994
+# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
+# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
+# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
+-----BEGIN CERTIFICATE-----
+MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
+# Serial: 1
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
+-----BEGIN CERTIFICATE-----
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
+-----BEGIN CERTIFICATE-----
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
+-----BEGIN CERTIFICATE-----
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
+-----BEGIN CERTIFICATE-----
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
+VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
+kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
+gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority RSA R2"
+# Serial: 6248227494352943350
+# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
+# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
+# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
+-----BEGIN CERTIFICATE-----
+MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
+CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
+MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
+A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
+DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
+OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
+4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
+HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
+aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
+b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
+Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
+PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
+pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
+UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
+MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
+HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
+9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
+s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
+cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
+79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
+/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
+ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
+Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
+QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
+w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
+S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
+mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority ECC"
+# Serial: 3182246526754555285
+# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
+# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
+# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
+-----BEGIN CERTIFICATE-----
+MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Label: "GlobalSign Root CA - R6"
+# Serial: 1417766617973444989252670301619537
+# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae
+# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1
+# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg
+MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx
+MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET
+MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI
+xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k
+ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD
+aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw
+LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw
+1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX
+k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2
+SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h
+bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n
+WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY
+rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce
+MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu
+bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN
+nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61
+55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj
+vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf
+cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz
+oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp
+nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs
+pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v
+JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R
+8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4
+5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GC CA"
+# Serial: 44084345621038548146064804565436152554
+# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23
+# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31
+# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d
+-----BEGIN CERTIFICATE-----
+MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw
+CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91
+bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg
+Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ
+BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu
+ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS
+b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni
+eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W
+p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T
+rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
+Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Global G2 Root O=UniTrust
+# Subject: CN=UCA Global G2 Root O=UniTrust
+# Label: "UCA Global G2 Root"
+# Serial: 124779693093741543919145257850076631279
+# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8
+# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a
+# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH
+bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x
+CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds
+b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr
+b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9
+kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm
+VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R
+VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc
+C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj
+tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY
+D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv
+j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl
+NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6
+iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP
+O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV
+ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj
+L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5
+1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl
+1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU
+b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV
+PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj
+y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb
+EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg
+DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI
++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX
+UB+K+wb1whnw0A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Extended Validation Root O=UniTrust
+# Subject: CN=UCA Extended Validation Root O=UniTrust
+# Label: "UCA Extended Validation Root"
+# Serial: 106100277556486529736699587978573607008
+# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2
+# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a
+# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF
+eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx
+MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV
+BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog
+D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop
+O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk
+sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi
+c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj
+VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz
+KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/
+TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G
+sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs
+1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD
+fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T
+AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN
+l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR
+ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ
+VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5
+c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp
+4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s
+t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj
+2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO
+vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C
+xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx
+cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM
+fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Label: "Certigna Root CA"
+# Serial: 269714418870597844693661054334862075617
+# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77
+# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43
+# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68
+-----BEGIN CERTIFICATE-----
+MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw
+WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw
+MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x
+MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD
+VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX
+BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw
+ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M
+CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu
+I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm
+TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh
+C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf
+ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz
+IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT
+Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k
+JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5
+hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB
+GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of
+1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov
+L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo
+dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr
+aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq
+hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG
+HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6
+0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB
+lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi
+o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1
+gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v
+faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63
+Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh
+jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw
+3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign Root CA - G1"
+# Serial: 235931866688319308814040
+# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac
+# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c
+# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67
+-----BEGIN CERTIFICATE-----
+MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD
+VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU
+ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH
+MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO
+MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv
+Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz
+f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO
+8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq
+d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM
+tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt
+Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB
+o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD
+AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x
+PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM
+wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d
+GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH
+6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx
+iN66zB+Afko=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign ECC Root CA - G3"
+# Serial: 287880440101571086945156
+# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40
+# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1
+# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b
+-----BEGIN CERTIFICATE-----
+MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG
+EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo
+bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g
+RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s
+b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS
+fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB
+zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq
+hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB
+CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD
++JbNR6iC8hZVdyR+EhCVBCyj
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign Root CA - C1"
+# Serial: 825510296613316004955058
+# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68
+# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01
+# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f
+-----BEGIN CERTIFICATE-----
+MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG
+A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg
+SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v
+dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ
+BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ
+HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH
+3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH
+GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c
+xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1
+aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq
+TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87
+/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4
+kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG
+YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT
++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo
+WXzhriKi4gp6D/piq1JM4fHfyr6DDUI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign ECC Root CA - C3"
+# Serial: 582948710642506000014504
+# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5
+# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66
+# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3
+-----BEGIN CERTIFICATE-----
+MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG
+EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx
+IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND
+IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci
+MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti
+sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O
+BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
+Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c
+3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J
+0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Label: "Hongkong Post Root CA 3"
+# Serial: 46170865288971385588281144162979347873371282084
+# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0
+# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02
+# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6
+-----BEGIN CERTIFICATE-----
+MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL
+BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ
+SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n
+a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5
+NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT
+CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u
+Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO
+dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI
+VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV
+9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY
+2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY
+vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt
+bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb
+x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+
+l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK
+TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj
+Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e
+i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw
+DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG
+7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk
+MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr
+gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk
+GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS
+3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm
+Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+
+l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c
+JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP
+L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG
+mpv0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G4"
+# Serial: 289383649854506086828220374796556676440
+# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88
+# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01
+# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw
+gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL
+Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg
+MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw
+BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0
+MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1
+c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ
+bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg
+Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B
+AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ
+2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E
+T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j
+5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM
+C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T
+DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX
+wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A
+2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm
+nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8
+dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl
+N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj
+c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS
+5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS
+Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr
+hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/
+B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI
+AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw
+H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+
+b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk
+2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol
+IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk
+5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY
+n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
+# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
+# Label: "Microsoft ECC Root Certificate Authority 2017"
+# Serial: 136839042543790627607696632466672567020
+# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67
+# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5
+# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02
+-----BEGIN CERTIFICATE-----
+MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD
+VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw
+MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV
+UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy
+b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR
+ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb
+hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3
+FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV
+L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB
+iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
+# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
+# Label: "Microsoft RSA Root Certificate Authority 2017"
+# Serial: 40975477897264996090493496164228220339
+# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47
+# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74
+# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0
+-----BEGIN CERTIFICATE-----
+MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl
+MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw
+NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5
+IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG
+EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N
+aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ
+Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0
+ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1
+HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm
+gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ
+jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc
+aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG
+YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6
+W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K
+UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH
++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q
+W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC
+LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC
+gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6
+tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh
+SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2
+TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3
+pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR
+xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp
+GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9
+dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN
+AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB
+RA+GsCyRxj3qrg+E
+-----END CERTIFICATE-----
+
+# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
+# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
+# Label: "e-Szigno Root CA 2017"
+# Serial: 411379200276854331539784714
+# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98
+# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1
+# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99
+-----BEGIN CERTIFICATE-----
+MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV
+BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk
+LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv
+b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ
+BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg
+THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v
+IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv
+xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H
+Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB
+eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo
+jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ
++efcMQ==
+-----END CERTIFICATE-----
+
+# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2
+# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2
+# Label: "certSIGN Root CA G2"
+# Serial: 313609486401300475190
+# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7
+# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32
+# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05
+-----BEGIN CERTIFICATE-----
+MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV
+BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g
+Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ
+BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ
+R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF
+dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw
+vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ
+uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp
+n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs
+cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW
+xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P
+rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF
+DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx
+DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy
+LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C
+eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ
+d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq
+kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC
+b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl
+qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0
+OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c
+NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk
+ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO
+pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj
+03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk
+PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE
+1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX
+QRBdJ3NghVdJIgc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global Certification Authority"
+# Serial: 1846098327275375458322922162
+# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e
+# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5
+# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8
+-----BEGIN CERTIFICATE-----
+MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw
+CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x
+ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1
+c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx
+OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI
+SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI
+b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn
+swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu
+7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8
+1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW
+80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP
+JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l
+RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw
+hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10
+coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc
+BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n
+twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud
+EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud
+DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W
+0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe
+uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q
+lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB
+aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE
+sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT
+MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe
+qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh
+VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8
+h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9
+EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK
+yeC2nOnOcXHebD8WpHk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global ECC P256 Certification Authority"
+# Serial: 4151900041497450638097112925
+# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54
+# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf
+# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4
+-----BEGIN CERTIFICATE-----
+MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD
+VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf
+BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3
+YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x
+NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G
+A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0
+d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF
+Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG
+SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN
+FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w
+DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw
+CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh
+DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global ECC P384 Certification Authority"
+# Serial: 2704997926503831671788816187
+# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6
+# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2
+# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97
+-----BEGIN CERTIFICATE-----
+MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD
+VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf
+BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3
+YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x
+NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G
+A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0
+d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF
+Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ
+j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF
+1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G
+A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3
+AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC
+MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu
+Sw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp.
+# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp.
+# Label: "NAVER Global Root Certification Authority"
+# Serial: 9013692873798656336226253319739695165984492813
+# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b
+# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1
+# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65
+-----BEGIN CERTIFICATE-----
+MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM
+BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG
+T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx
+CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD
+b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA
+iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH
+38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE
+HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz
+kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP
+szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq
+vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf
+nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG
+YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo
+0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a
+CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K
+AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I
+36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB
+Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN
+qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj
+cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm
++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL
+hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe
+lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7
+p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8
+piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR
+LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX
+5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO
+dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul
+9XXeifdy
+-----END CERTIFICATE-----
+
+# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres
+# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres
+# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS"
+# Serial: 131542671362353147877283741781055151509
+# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb
+# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a
+# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb
+-----BEGIN CERTIFICATE-----
+MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw
+CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw
+FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S
+Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5
+MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL
+DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS
+QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK
+Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu
+SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC
+MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy
+v+c=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa
+# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa
+# Label: "GlobalSign Root R46"
+# Serial: 1552617688466950547958867513931858518042577
+# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef
+# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90
+# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA
+MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD
+VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy
+MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt
+c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ
+OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG
+vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud
+316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo
+0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE
+y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF
+zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE
++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN
+I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs
+x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa
+ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC
+4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4
+7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg
+JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti
+2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk
+pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF
+FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt
+rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk
+ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5
+u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP
+4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6
+N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3
+vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa
+# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa
+# Label: "GlobalSign Root E46"
+# Serial: 1552617690338932563915843282459653771421763
+# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f
+# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84
+# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58
+-----BEGIN CERTIFICATE-----
+MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx
+CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD
+ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw
+MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq
+R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd
+yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ
+7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8
++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
+# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
+# Label: "GLOBALTRUST 2020"
+# Serial: 109160994242082918454945253
+# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8
+# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2
+# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a
+-----BEGIN CERTIFICATE-----
+MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG
+A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw
+FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx
+MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u
+aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq
+hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b
+RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z
+YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3
+QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw
+yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+
+BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ
+SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH
+r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0
+4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me
+dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw
+q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2
+nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu
+H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA
+VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC
+XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd
+6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf
++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi
+kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7
+wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB
+TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C
+MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn
+4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I
+aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy
+qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
+# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
+# Label: "ANF Secure Server Root CA"
+# Serial: 996390341000653745
+# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96
+# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74
+# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99
+-----BEGIN CERTIFICATE-----
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV
+BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk
+YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV
+BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN
+MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF
+UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD
+VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v
+dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj
+cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q
+yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH
+2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX
+H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL
+zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR
+p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz
+W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/
+SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn
+LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3
+n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B
+u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj
+o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC
+AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L
+9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej
+rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK
+pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0
+vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq
+OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ
+/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9
+2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI
++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2
+MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo
+tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Label: "Certum EC-384 CA"
+# Serial: 160250656287871593594747141429395092468
+# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1
+# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed
+# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6
+-----BEGIN CERTIFICATE-----
+MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw
+CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw
+JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT
+EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0
+WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT
+LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX
+BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE
+KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm
+Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8
+EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J
+UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn
+nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Root CA"
+# Serial: 40870380103424195783807378461123655149
+# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29
+# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5
+# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd
+-----BEGIN CERTIFICATE-----
+MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6
+MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu
+MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV
+BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw
+MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg
+U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo
+b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ
+n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q
+p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq
+NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF
+8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3
+HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa
+mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi
+7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF
+ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P
+qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ
+v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6
+Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1
+vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD
+ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4
+WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo
+zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR
+5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ
+GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq
+0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D
+P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM
+qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP
+0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf
+E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb
+-----END CERTIFICATE-----
+
+# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique
+# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique
+# Label: "TunTrust Root CA"
+# Serial: 108534058042236574382096126452369648152337120275
+# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4
+# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb
+# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41
+-----BEGIN CERTIFICATE-----
+MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL
+BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg
+Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv
+b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG
+EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u
+IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ
+n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd
+2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF
+VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ
+GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF
+li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU
+r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2
+eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb
+MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg
+jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB
+7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW
+5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE
+ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0
+90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z
+xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu
+QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4
+FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH
+22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP
+xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn
+dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5
+Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b
+nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ
+CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH
+u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj
+d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o=
+-----END CERTIFICATE-----
+
+# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Label: "HARICA TLS RSA Root CA 2021"
+# Serial: 76817823531813593706434026085292783742
+# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91
+# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d
+# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d
+-----BEGIN CERTIFICATE-----
+MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs
+MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg
+Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL
+MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl
+YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv
+b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l
+mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE
+4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv
+a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M
+pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw
+Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b
+LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY
+AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB
+AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq
+E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr
+W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ
+CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE
+AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU
+X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3
+f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja
+H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP
+JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P
+zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt
+jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0
+/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT
+BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79
+aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW
+xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU
+63ZTGI0RmLo=
+-----END CERTIFICATE-----
+
+# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Label: "HARICA TLS ECC Root CA 2021"
+# Serial: 137515985548005187474074462014555733966
+# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0
+# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48
+# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01
+-----BEGIN CERTIFICATE-----
+MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw
+CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh
+cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v
+dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG
+A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj
+aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg
+Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7
+KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y
+STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD
+AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw
+SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN
+nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 1977337328857672817
+# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3
+# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe
+# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1
+MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc
+tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd
+IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j
+b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC
+AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw
+ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m
+iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF
+Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ
+hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P
+Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE
+EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV
+1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t
+CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR
+5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw
+f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9
+ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK
+GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV
+-----END CERTIFICATE-----
+
+# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd.
+# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd.
+# Label: "vTrus ECC Root CA"
+# Serial: 630369271402956006249506845124680065938238527194
+# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85
+# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1
+# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3
+-----BEGIN CERTIFICATE-----
+MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw
+RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY
+BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz
+MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u
+LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0
+v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd
+e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw
+V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA
+AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG
+GJTO
+-----END CERTIFICATE-----
+
+# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd.
+# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd.
+# Label: "vTrus Root CA"
+# Serial: 387574501246983434957692974888460947164905180485
+# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc
+# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7
+# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87
+-----BEGIN CERTIFICATE-----
+MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL
+BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x
+FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx
+MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s
+THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD
+ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc
+IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU
+AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+
+GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9
+8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH
+flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt
+J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim
+0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN
+pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ
+UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW
+OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB
+AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet
+8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd
+nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j
+bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM
+Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv
+TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS
+S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr
+I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9
+b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB
+UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P
+Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven
+sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X2 O=Internet Security Research Group
+# Subject: CN=ISRG Root X2 O=Internet Security Research Group
+# Label: "ISRG Root X2"
+# Serial: 87493402998870891108772069816698636114
+# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5
+# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af
+# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70
+-----BEGIN CERTIFICATE-----
+MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw
+CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg
+R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00
+MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT
+ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw
+EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW
++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9
+ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI
+zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW
+tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1
+/q4AaOeMSQ+2b1tbFfLn
+-----END CERTIFICATE-----
+
+# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd.
+# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd.
+# Label: "HiPKI Root CA - G1"
+# Serial: 60966262342023497858655262305426234976
+# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3
+# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60
+# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc
+-----BEGIN CERTIFICATE-----
+MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa
+Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3
+YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw
+qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv
+Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6
+lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz
+Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ
+KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK
+FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj
+HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr
+y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ
+/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM
+a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6
+fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG
+SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi
+7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc
+SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza
+ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc
+XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg
+iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho
+L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF
+Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+
+vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU
+YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 159662223612894884239637590694
+# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc
+# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28
+# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2
+-----BEGIN CERTIFICATE-----
+MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD
+VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw
+MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g
+UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT
+BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx
+uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV
+HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/
++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147
+bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R1 O=Google Trust Services LLC
+# Subject: CN=GTS Root R1 O=Google Trust Services LLC
+# Label: "GTS Root R1"
+# Serial: 159662320309726417404178440727
+# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40
+# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a
+# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf
+-----BEGIN CERTIFICATE-----
+MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo
+27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w
+Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw
+TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl
+qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH
+szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8
+Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk
+MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92
+wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p
+aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN
+VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID
+AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E
+FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb
+C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe
+QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy
+h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4
+7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J
+ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef
+MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT
+6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ
+0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm
+2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb
+bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R2 O=Google Trust Services LLC
+# Subject: CN=GTS Root R2 O=Google Trust Services LLC
+# Label: "GTS Root R2"
+# Serial: 159662449406622349769042896298
+# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc
+# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94
+# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8
+-----BEGIN CERTIFICATE-----
+MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt
+nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY
+6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu
+MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k
+RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg
+f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV
++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo
+dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW
+Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa
+G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq
+gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID
+AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E
+FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H
+vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8
+0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC
+B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u
+NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg
+yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev
+HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6
+xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR
+TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg
+JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV
+7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl
+6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R3 O=Google Trust Services LLC
+# Subject: CN=GTS Root R3 O=Google Trust Services LLC
+# Label: "GTS Root R3"
+# Serial: 159662495401136852707857743206
+# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73
+# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46
+# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48
+-----BEGIN CERTIFICATE-----
+MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD
+VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG
+A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw
+WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz
+IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G
+jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2
+4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7
+VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm
+ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R4 O=Google Trust Services LLC
+# Subject: CN=GTS Root R4 O=Google Trust Services LLC
+# Label: "GTS Root R4"
+# Serial: 159662532700760215368942768210
+# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8
+# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47
+# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d
+-----BEGIN CERTIFICATE-----
+MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD
+VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG
+A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw
+WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz
+IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi
+QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR
+HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D
+9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8
+p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj
+# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj
+# Label: "Telia Root CA v2"
+# Serial: 7288924052977061235122729490515358
+# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48
+# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd
+# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx
+CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE
+AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1
+NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ
+MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP
+ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq
+AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9
+vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9
+lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD
+n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT
+7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o
+6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC
+TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6
+WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R
+DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI
+pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj
+YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy
+rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw
+AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ
+8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi
+0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS
+SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K
+TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF
+6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er
+3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt
+Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT
+VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW
+ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA
+rBPuUBQemMc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH
+# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH
+# Label: "D-TRUST BR Root CA 1 2020"
+# Serial: 165870826978392376648679885835942448534
+# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed
+# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67
+# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw
+CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS
+VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5
+NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG
+A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS
+zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0
+QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/
+VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g
+PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf
+Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l
+dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1
+c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO
+PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW
+wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV
+dWNbFJWcHwHP2NVypw87
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH
+# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH
+# Label: "D-TRUST EV Root CA 1 2020"
+# Serial: 126288379621884218666039612629459926992
+# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e
+# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07
+# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw
+CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS
+VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5
+NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG
+A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC
+/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD
+wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3
+OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g
+PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf
+Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l
+dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1
+c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO
+PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb
+gfM0agPnIjhQW+0ZT0MW
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc.
+# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc.
+# Label: "DigiCert TLS ECC P384 Root G5"
+# Serial: 13129116028163249804115411775095713523
+# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed
+# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee
+# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05
+-----BEGIN CERTIFICATE-----
+MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp
+Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2
+MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ
+bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG
+ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS
+7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp
+0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS
+B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49
+BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ
+LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4
+DXZDjC5Ty3zfDBeWUA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc.
+# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc.
+# Label: "DigiCert TLS RSA4096 Root G5"
+# Serial: 11930366277458970227240571539258396554
+# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1
+# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35
+# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN
+MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT
+HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN
+NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs
+IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+
+ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0
+2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp
+wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM
+pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD
+nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po
+sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx
+Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd
+Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX
+KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe
+XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL
+tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv
+TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN
+AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw
+GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H
+PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF
+O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ
+REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik
+AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+
+p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw
+MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF
+qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK
+ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certainly Root R1 O=Certainly
+# Subject: CN=Certainly Root R1 O=Certainly
+# Label: "Certainly Root R1"
+# Serial: 188833316161142517227353805653483829216
+# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12
+# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af
+# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0
+-----BEGIN CERTIFICATE-----
+MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw
+PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy
+dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0
+YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2
+1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT
+vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed
+aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0
+1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5
+r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5
+cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ
+wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ
+6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA
+2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH
+Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR
+eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB
+/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u
+d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr
+PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d
+8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi
+1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd
+rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di
+taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7
+lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj
+yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn
+Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy
+yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n
+wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6
+OV+KmalBWQewLK8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certainly Root E1 O=Certainly
+# Subject: CN=Certainly Root E1 O=Certainly
+# Label: "Certainly Root E1"
+# Serial: 8168531406727139161245376702891150584
+# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9
+# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b
+# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2
+-----BEGIN CERTIFICATE-----
+MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw
+CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu
+bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ
+BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s
+eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2
+QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4
+hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm
+ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG
+BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR
+-----END CERTIFICATE-----
+
+# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
+# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
+# Label: "Security Communication RootCA3"
+# Serial: 16247922307909811815
+# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26
+# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a
+# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94
+-----BEGIN CERTIFICATE-----
+MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV
+BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw
+JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2
+MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc
+U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg
+Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r
+CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA
+lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG
+TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7
+9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7
+8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4
+g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we
+GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst
++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M
+0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ
+T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw
+HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS
+YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA
+FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd
+9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI
+UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+
+OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke
+gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf
+iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV
+nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD
+2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI//
+1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad
+TdJ0MN1kURXbg4NR16/9M51NZg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
+# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
+# Label: "Security Communication ECC RootCA1"
+# Serial: 15446673492073852651
+# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86
+# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41
+# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11
+-----BEGIN CERTIFICATE-----
+MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT
+AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD
+VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx
+NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT
+HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5
+IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl
+dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK
+ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu
+9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O
+be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY
+# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY
+# Label: "BJCA Global Root CA1"
+# Serial: 113562791157148395269083148143378328608
+# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90
+# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a
+# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU
+MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI
+T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz
+MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF
+SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh
+bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z
+xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ
+spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5
+58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR
+at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll
+5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq
+nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK
+V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/
+pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO
+z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn
+jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+
+WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF
+7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
+AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4
+YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli
+awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u
++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88
+X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN
+SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo
+P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI
++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz
+znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9
+eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2
+YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy
+r/6zcCwupvI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY
+# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY
+# Label: "BJCA Global Root CA2"
+# Serial: 58605626836079930195615843123109055211
+# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c
+# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6
+# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82
+-----BEGIN CERTIFICATE-----
+MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw
+CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ
+VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy
+MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ
+TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS
+b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B
+IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+
++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK
+sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA
+94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B
+43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
+# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
+# Label: "Sectigo Public Server Authentication Root E46"
+# Serial: 88989738453351742415770396670917916916
+# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01
+# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a
+# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83
+-----BEGIN CERTIFICATE-----
+MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw
+CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T
+ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN
+MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG
+A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT
+ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC
+WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+
+6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B
+Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa
+qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q
+4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
+# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
+# Label: "Sectigo Public Server Authentication Root R46"
+# Serial: 156256931880233212765902055439220583700
+# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5
+# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38
+# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06
+-----BEGIN CERTIFICATE-----
+MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD
+Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw
+HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY
+MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp
+YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa
+ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz
+SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf
+iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X
+ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3
+IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS
+VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE
+SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu
++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt
+8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L
+HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt
+zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P
+AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c
+mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ
+YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52
+gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA
+Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB
+JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX
+DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui
+TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5
+dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65
+LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp
+0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY
+QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
+# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
+# Label: "SSL.com TLS RSA Root CA 2022"
+# Serial: 148535279242832292258835760425842727825
+# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da
+# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca
+# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed
+-----BEGIN CERTIFICATE-----
+MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO
+MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD
+DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX
+DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw
+b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC
+AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP
+L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY
+t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins
+S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3
+PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO
+L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3
+R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w
+dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS
++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS
+d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG
+AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f
+gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j
+BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z
+NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt
+hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM
+QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf
+R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ
+DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW
+P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy
+lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq
+bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w
+AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q
+r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji
+Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU
+98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation
+# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation
+# Label: "SSL.com TLS ECC Root CA 2022"
+# Serial: 26605119622390491762507526719404364228
+# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5
+# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39
+# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43
+-----BEGIN CERTIFICATE-----
+MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw
+CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT
+U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2
+MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh
+dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG
+ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm
+acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN
+SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME
+GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW
+uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp
+15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN
+b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos
+# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos
+# Label: "Atos TrustedRoot Root CA ECC TLS 2021"
+# Serial: 81873346711060652204712539181482831616
+# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8
+# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd
+# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8
+-----BEGIN CERTIFICATE-----
+MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w
+LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w
+CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0
+MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF
+Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI
+zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X
+tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4
+AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2
+KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD
+aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu
+CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo
+9H1/IISpQuQo
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos
+# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos
+# Label: "Atos TrustedRoot Root CA RSA TLS 2021"
+# Serial: 111436099570196163832749341232207667876
+# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2
+# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48
+# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f
+-----BEGIN CERTIFICATE-----
+MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM
+MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx
+MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00
+MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD
+QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN
+BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z
+4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv
+Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ
+kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs
+GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln
+nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh
+3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD
+0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy
+geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8
+ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB
+c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI
+pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
+DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS
+4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs
+o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ
+qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw
+xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM
+rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4
+AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR
+0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY
+o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5
+dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE
+oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
+# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
+# Label: "TrustAsia Global Root CA G3"
+# Serial: 576386314500428537169965010905813481816650257167
+# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04
+# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7
+# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08
+-----BEGIN CERTIFICATE-----
+MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM
+BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp
+ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe
+Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw
+IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU
+cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC
+DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS
+T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK
+AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1
+nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep
+qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA
+yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs
+hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX
+zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv
+kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT
+f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA
+uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB
+o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih
+MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E
+BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4
+wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2
+XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1
+JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j
+ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV
+VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx
+xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on
+AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d
+7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj
+gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV
++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo
+FGWsJwt0ivKH
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
+# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
+# Label: "TrustAsia Global Root CA G4"
+# Serial: 451799571007117016466790293371524403291602933463
+# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb
+# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a
+# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c
+-----BEGIN CERTIFICATE-----
+MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw
+WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs
+IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y
+MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD
+VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz
+dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx
+s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw
+LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij
+YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD
+pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE
+AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR
+UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj
+/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope
+# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope
+# Label: "CommScope Public Trust ECC Root-01"
+# Serial: 385011430473757362783587124273108818652468453534
+# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16
+# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d
+# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b
+-----BEGIN CERTIFICATE-----
+MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw
+TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t
+bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa
+Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv
+cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C
+flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE
+hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq
+hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg
+2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS
+Um9poIyNStDuiw7LR47QjRE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope
+# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope
+# Label: "CommScope Public Trust ECC Root-02"
+# Serial: 234015080301808452132356021271193974922492992893
+# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2
+# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5
+# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a
+-----BEGIN CERTIFICATE-----
+MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw
+TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t
+bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa
+Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv
+cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL
+j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU
+v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq
+hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n
+ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV
+mkzw5l4lIhVtwodZ0LKOag==
+-----END CERTIFICATE-----
+
+# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope
+# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope
+# Label: "CommScope Public Trust RSA Root-01"
+# Serial: 354030733275608256394402989253558293562031411421
+# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8
+# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93
+# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL
+BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi
+Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1
+NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t
+U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt
+MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk
+YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh
+suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al
+DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj
+WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl
+P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547
+KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p
+UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/
+kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO
+Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB
+Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U
+CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ
+KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6
+NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ
+nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+
+QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v
+trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a
+aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD
+j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4
+Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w
+lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn
+YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc
+icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw
+-----END CERTIFICATE-----
+
+# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope
+# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope
+# Label: "CommScope Public Trust RSA Root-02"
+# Serial: 480062499834624527752716769107743131258796508494
+# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa
+# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae
+# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL
+BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi
+Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2
+NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t
+U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt
+MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE
+NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0
+kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C
+rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz
+hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2
+LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs
+n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku
+FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5
+kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3
+wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v
+wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs
+5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ
+KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB
+KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3
++VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme
+APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq
+pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT
+6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF
+sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt
+PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d
+lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670
+v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O
+rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
+# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
+# Label: "Telekom Security TLS ECC Root 2020"
+# Serial: 72082518505882327255703894282316633856
+# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd
+# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec
+# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1
+-----BEGIN CERTIFICATE-----
+MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw
+CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH
+bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw
+MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx
+JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE
+AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O
+tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP
+f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA
+MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di
+z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn
+27iQ7t0l
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
+# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
+# Label: "Telekom Security TLS RSA Root 2023"
+# Serial: 44676229530606711399881795178081572759
+# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2
+# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93
+# SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46
+-----BEGIN CERTIFICATE-----
+MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj
+MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0
+eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy
+MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC
+REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG
+A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9
+cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV
+cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA
+U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6
+Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug
+BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy
+8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J
+co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg
+8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8
+rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12
+mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg
++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX
+gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2
+p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ
+pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm
+9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw
+M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd
+GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+
+CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t
+xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+
+w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK
+L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj
+X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q
+ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm
+dTdmQRCsu/WU48IxK63nI1bMNSWSs1A=
+-----END CERTIFICATE-----
diff --git a/Lib/site-packages/certifi/core.py b/Lib/site-packages/certifi/core.py
new file mode 100644
index 0000000..91f538b
--- /dev/null
+++ b/Lib/site-packages/certifi/core.py
@@ -0,0 +1,114 @@
+"""
+certifi.py
+~~~~~~~~~~
+
+This module returns the installation location of cacert.pem or its contents.
+"""
+import sys
+import atexit
+
+def exit_cacert_ctx() -> None:
+ _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr]
+
+
+if sys.version_info >= (3, 11):
+
+ from importlib.resources import as_file, files
+
+ _CACERT_CTX = None
+ _CACERT_PATH = None
+
+ def where() -> str:
+ # This is slightly terrible, but we want to delay extracting the file
+ # in cases where we're inside of a zipimport situation until someone
+ # actually calls where(), but we don't want to re-extract the file
+ # on every call of where(), so we'll do it once then store it in a
+ # global variable.
+ global _CACERT_CTX
+ global _CACERT_PATH
+ if _CACERT_PATH is None:
+ # This is slightly janky, the importlib.resources API wants you to
+ # manage the cleanup of this file, so it doesn't actually return a
+ # path, it returns a context manager that will give you the path
+ # when you enter it and will do any cleanup when you leave it. In
+ # the common case of not needing a temporary file, it will just
+ # return the file system location and the __exit__() is a no-op.
+ #
+ # We also have to hold onto the actual context manager, because
+ # it will do the cleanup whenever it gets garbage collected, so
+ # we will also store that at the global level as well.
+ _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
+ _CACERT_PATH = str(_CACERT_CTX.__enter__())
+ atexit.register(exit_cacert_ctx)
+
+ return _CACERT_PATH
+
+ def contents() -> str:
+ return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii")
+
+elif sys.version_info >= (3, 7):
+
+ from importlib.resources import path as get_path, read_text
+
+ _CACERT_CTX = None
+ _CACERT_PATH = None
+
+ def where() -> str:
+ # This is slightly terrible, but we want to delay extracting the
+ # file in cases where we're inside of a zipimport situation until
+ # someone actually calls where(), but we don't want to re-extract
+ # the file on every call of where(), so we'll do it once then store
+ # it in a global variable.
+ global _CACERT_CTX
+ global _CACERT_PATH
+ if _CACERT_PATH is None:
+ # This is slightly janky, the importlib.resources API wants you
+ # to manage the cleanup of this file, so it doesn't actually
+ # return a path, it returns a context manager that will give
+ # you the path when you enter it and will do any cleanup when
+ # you leave it. In the common case of not needing a temporary
+ # file, it will just return the file system location and the
+ # __exit__() is a no-op.
+ #
+ # We also have to hold onto the actual context manager, because
+ # it will do the cleanup whenever it gets garbage collected, so
+ # we will also store that at the global level as well.
+ _CACERT_CTX = get_path("certifi", "cacert.pem")
+ _CACERT_PATH = str(_CACERT_CTX.__enter__())
+ atexit.register(exit_cacert_ctx)
+
+ return _CACERT_PATH
+
+ def contents() -> str:
+ return read_text("certifi", "cacert.pem", encoding="ascii")
+
+else:
+ import os
+ import types
+ from typing import Union
+
+ Package = Union[types.ModuleType, str]
+ Resource = Union[str, "os.PathLike"]
+
+ # This fallback will work for Python versions prior to 3.7 that lack the
+ # importlib.resources module but relies on the existing `where` function
+ # so won't address issues with environments like PyOxidizer that don't set
+ # __file__ on modules.
+ def read_text(
+ package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict'
+ ) -> str:
+ with open(where(), encoding=encoding) as data:
+ return data.read()
+
+ # If we don't have importlib.resources, then we will just do the old logic
+ # of assuming we're on the filesystem and munge the path directly.
+ def where() -> str:
+ f = os.path.dirname(__file__)
+
+ return os.path.join(f, "cacert.pem")
+
+ def contents() -> str:
+ return read_text("certifi", "cacert.pem", encoding="ascii")
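In normal use the two public helpers are consumed rather than called through the fallback machinery: `where()` yields a filesystem path for APIs that want a `cafile`, while `contents()` yields the PEM text itself. A short sketch of both against the standard `ssl` module:

```python
import ssl
import certifi

# Build a client-side SSL context that trusts exactly the bundled roots.
ctx = ssl.create_default_context(cafile=certifi.where())

# Or load the bundle contents directly, avoiding a filesystem path entirely
# (useful when the path-based extraction above is undesirable).
ctx2 = ssl.create_default_context()
ctx2.load_verify_locations(cadata=certifi.contents())
```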
diff --git a/Lib/site-packages/certifi/py.typed b/Lib/site-packages/certifi/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/charset_normalizer-3.3.2.dist-info/INSTALLER b/Lib/site-packages/charset_normalizer-3.3.2.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/charset_normalizer-3.3.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/charset_normalizer-3.3.2.dist-info/LICENSE b/Lib/site-packages/charset_normalizer-3.3.2.dist-info/LICENSE
new file mode 100644
index 0000000..ad82355
--- /dev/null
+++ b/Lib/site-packages/charset_normalizer-3.3.2.dist-info/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 TAHRI Ahmed R.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/Lib/site-packages/charset_normalizer-3.3.2.dist-info/METADATA b/Lib/site-packages/charset_normalizer-3.3.2.dist-info/METADATA
new file mode 100644
index 0000000..822550e
--- /dev/null
+++ b/Lib/site-packages/charset_normalizer-3.3.2.dist-info/METADATA
@@ -0,0 +1,683 @@
+Metadata-Version: 2.1
+Name: charset-normalizer
+Version: 3.3.2
+Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.
+Home-page: https://github.com/Ousret/charset_normalizer
+Author: Ahmed TAHRI
+Author-email: ahmed.tahri@cloudnursery.dev
+License: MIT
+Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues
+Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest
+Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Text Processing :: Linguistic
+Classifier: Topic :: Utilities
+Classifier: Typing :: Typed
+Requires-Python: >=3.7.0
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Provides-Extra: unicode_backport
+
+
+> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`,
+> I'm trying to resolve the issue by taking a new approach.
+> All IANA character set names for which the Python core library provides codecs are supported.
+
+
+// `Documentation` provides the information for describing a service.
+//
+// Example:
+//
+//     documentation:
+//       summary: >
+//         The Google Calendar API gives access
+//         to most calendar features.
+//       pages:
+//       - name: Overview
+//         content: (== include google/foo/overview.md ==)
+//       - name: Tutorial
+//         content: (== include google/foo/tutorial.md ==)
+//         subpages:
+//         - name: Java
+//           content: (== include google/foo/tutorial_java.md ==)
+//       rules:
+//       - selector: google.calendar.Calendar.Get
+//         description: >
+//           ...
+//       - selector: google.calendar.Calendar.Put
+//         description: >
+//           ...
+//
+// Documentation is provided in markdown syntax. In addition to
+// standard markdown features, definition lists, tables and fenced
+// code blocks are supported. Section headers can be provided and are
+// interpreted relative to the section nesting of the context where
+// a documentation fragment is embedded.
+//
+// Documentation from the IDL is merged with documentation defined
+// via the config at normalization time, where documentation provided
+// by config rules overrides IDL provided.
+//
+// A number of constructs specific to the API platform are supported
+// in documentation text.
+//
+// In order to reference a proto element, the following
+// notation can be used:
+//
+//     [fully.qualified.proto.name][]
+//
+// To override the display text used for the link, this can be used:
+//
+//     [display text][fully.qualified.proto.name]
+//
+// Text can be excluded from doc using the following notation:
+//
+//     (-- internal comment --)
+//
+// A few directives are available in documentation. Note that
+// directives must appear on a single line to be properly
+// identified. The `include` directive includes a markdown file from
+// an external source:
+//
+//     (== include path/to/file ==)
+//
+// The `resource_for` directive marks a message to be the resource of
+// a collection in REST view. If it is not specified, tools attempt
+// to infer the resource from the operations in a collection:
+//
+//     (== resource_for v1.shelves.books ==)
+//
+// The directive `suppress_warning` does not directly affect documentation
+// and is documented together with service config validation.
+message Documentation {
+ // A short description of what the service does. The summary must be plain
+ // text. It becomes the overview of the service displayed in Google Cloud
+ // Console.
+ // NOTE: This field is equivalent to the standard field `description`.
+ string summary = 1;
+
+ // The top level pages for the documentation set.
+ repeated Page pages = 5;
+
+ // A list of documentation rules that apply to individual API elements.
+ //
+ // **NOTE:** All service configuration rules follow "last one wins" order.
+ repeated DocumentationRule rules = 3;
+
+ // The URL to the root of documentation.
+ string documentation_root_url = 4;
+
+ // Specifies the service root url if the default one (the service name
+ // from the yaml file) is not suitable. This can be seen in any fully
+ // specified service urls as well as sections that show a base that other
+ // urls are relative to.
+ string service_root_url = 6;
+
+  // Declares a single overview page. For example:
+  //
+  //     documentation:
+  //       summary: ...
+  //       overview: (== include overview.md ==)
+  //
+  // This is a shortcut for the following declaration (using pages style):
+  //
+  //     documentation:
+  //       summary: ...
+  //       pages:
+  //       - name: Overview
+  //         content: (== include overview.md ==)
+  //
+  // Note: you cannot specify both `overview` field and `pages` field.
+ string overview = 2;
+}
+
+// A documentation rule provides information about individual API elements.
+message DocumentationRule {
+ // The selector is a comma-separated list of patterns for any element such as
+ // a method, a field, an enum value. Each pattern is a qualified name of the
+ // element which may end in "*", indicating a wildcard. Wildcards are only
+ // allowed at the end and for a whole component of the qualified name,
+ // i.e. "foo.*" is ok, but not "foo.b*" or "foo.*.bar". A wildcard will match
+ // one or more components. To specify a default for all applicable elements,
+ // the whole pattern "*" is used.
+ string selector = 1;
+
+ // Description of the selected proto element (e.g. a message, a method, a
+ // 'service' definition, or a field). Defaults to leading & trailing comments
+ // taken from the proto source definition of the proto element.
+ string description = 2;
+
+ // Deprecation description of the selected element(s). It can be provided if
+ // an element is marked as `deprecated`.
+ string deprecation_description = 3;
+}
+
+// Represents a documentation page. A page can contain subpages to represent
+// nested documentation set structure.
+message Page {
+  // The name of the page. It will be used as an identity of the page to
+  // generate the URI of the page, the text of the link to this page in
+  // navigation, etc. The full page name (the page names from the root page
+  // down to this page, concatenated with `.`) can be used as a reference to
+  // the page in your documentation. For example:
+  //
+  //     pages:
+  //     - name: Tutorial
+  //       content: (== include tutorial.md ==)
+  //       subpages:
+  //       - name: Java
+  //         content: (== include tutorial_java.md ==)
+  //
+  // You can reference the `Java` page using Markdown reference link syntax:
+  // `[Java][Tutorial.Java]`.
+ string name = 1;
+
+ // The Markdown content of the page. You can use (== include {path}
+ // ==) to include content from a Markdown file. The content can be
+ // used to produce the documentation page such as HTML format page.
+ string content = 2;
+
+ // Subpages of this page. The order of subpages specified here will be
+ // honored in the generated docset.
+ repeated Page subpages = 3;
+}
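The `Documentation`, `DocumentationRule`, and `Page` messages above are plain proto3 data types, so a docset's structure can be sketched directly with the generated Python bindings (added below as `documentation_pb2.py`). A minimal sketch, assuming those bindings are importable as `google.api.documentation_pb2`; the page names and include paths are illustrative:

```python
from google.api import documentation_pb2
from google.protobuf import text_format

# Textproto mirroring the YAML example in the comment block above.
DOC = text_format.Parse(
    """
    summary: "The Google Calendar API gives access to most calendar features."
    pages {
      name: "Tutorial"
      content: "(== include google/foo/tutorial.md ==)"
      subpages { name: "Java" content: "(== include google/foo/tutorial_java.md ==)" }
    }
    rules { selector: "*" description: "Fallback description." }
    """,
    documentation_pb2.Documentation(),
)

# The full page name is the dot-joined path from the root page, so the
# nested page below is referenced in docs as `[Java][Tutorial.Java]`.
java = DOC.pages[0].subpages[0]
print(".".join([DOC.pages[0].name, java.name]))  # Tutorial.Java
```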
diff --git a/Lib/site-packages/google/api/documentation_pb2.py b/Lib/site-packages/google/api/documentation_pb2.py
new file mode 100644
index 0000000..4e6def5
--- /dev/null
+++ b/Lib/site-packages/google/api/documentation_pb2.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/documentation.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x1egoogle/api/documentation.proto\x12\ngoogle.api"\xbb\x01\n\rDocumentation\x12\x0f\n\x07summary\x18\x01 \x01(\t\x12\x1f\n\x05pages\x18\x05 \x03(\x0b\x32\x10.google.api.Page\x12,\n\x05rules\x18\x03 \x03(\x0b\x32\x1d.google.api.DocumentationRule\x12\x1e\n\x16\x64ocumentation_root_url\x18\x04 \x01(\t\x12\x18\n\x10service_root_url\x18\x06 \x01(\t\x12\x10\n\x08overview\x18\x02 \x01(\t"[\n\x11\x44ocumentationRule\x12\x10\n\x08selector\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x1f\n\x17\x64\x65precation_description\x18\x03 \x01(\t"I\n\x04Page\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\t\x12"\n\x08subpages\x18\x03 \x03(\x0b\x32\x10.google.api.PageBt\n\x0e\x63om.google.apiB\x12\x44ocumentationProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_DOCUMENTATION = DESCRIPTOR.message_types_by_name["Documentation"]
+_DOCUMENTATIONRULE = DESCRIPTOR.message_types_by_name["DocumentationRule"]
+_PAGE = DESCRIPTOR.message_types_by_name["Page"]
+Documentation = _reflection.GeneratedProtocolMessageType(
+ "Documentation",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DOCUMENTATION,
+ "__module__": "google.api.documentation_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Documentation)
+ },
+)
+_sym_db.RegisterMessage(Documentation)
+
+DocumentationRule = _reflection.GeneratedProtocolMessageType(
+ "DocumentationRule",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DOCUMENTATIONRULE,
+ "__module__": "google.api.documentation_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.DocumentationRule)
+ },
+)
+_sym_db.RegisterMessage(DocumentationRule)
+
+Page = _reflection.GeneratedProtocolMessageType(
+ "Page",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _PAGE,
+ "__module__": "google.api.documentation_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Page)
+ },
+)
+_sym_db.RegisterMessage(Page)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\022DocumentationProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\242\002\004GAPI"
+ _DOCUMENTATION._serialized_start = 47
+ _DOCUMENTATION._serialized_end = 234
+ _DOCUMENTATIONRULE._serialized_start = 236
+ _DOCUMENTATIONRULE._serialized_end = 327
+ _PAGE._serialized_start = 329
+ _PAGE._serialized_end = 402
+# @@protoc_insertion_point(module_scope)
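The selector grammar documented on `DocumentationRule.selector` above (a wildcard is allowed only as a whole trailing component and matches one or more components) is easy to get wrong. A hypothetical helper, not part of this package, that implements just that rule:

```python
# Hypothetical helper making the selector semantics concrete: "*" matches
# everything; a trailing ".*" wildcard matches one or more additional
# components; anything else must match exactly. Comma-separated selector
# lists and rejection of malformed patterns such as "foo.b*" are omitted.
def selector_matches(selector: str, element: str) -> bool:
    if selector == "*":
        return True
    parts = selector.split(".")
    target = element.split(".")
    if parts[-1] == "*":
        prefix = parts[:-1]
        return len(target) > len(prefix) and target[: len(prefix)] == prefix
    return parts == target

assert selector_matches("foo.*", "foo.bar.baz")   # wildcard spans components
assert not selector_matches("foo.*", "foo")       # must match at least one more
assert selector_matches("google.calendar.Calendar.Get",
                        "google.calendar.Calendar.Get")
```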
diff --git a/Lib/site-packages/google/api/endpoint.proto b/Lib/site-packages/google/api/endpoint.proto
new file mode 100644
index 0000000..7f6dca7
--- /dev/null
+++ b/Lib/site-packages/google/api/endpoint.proto
@@ -0,0 +1,73 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig";
+option java_multiple_files = true;
+option java_outer_classname = "EndpointProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// `Endpoint` describes a network address of a service that serves a set of
+// APIs. It is commonly known as a service endpoint. A service may expose
+// any number of service endpoints, and all service endpoints share the same
+// service definition, such as quota limits and monitoring metrics.
+//
+// Example:
+//
+// type: google.api.Service
+// name: library-example.googleapis.com
+// endpoints:
+// # Declares network address `https://library-example.googleapis.com`
+// # for service `library-example.googleapis.com`. The `https` scheme
+// # is implicit for all service endpoints. Other schemes may be
+// # supported in the future.
+// - name: library-example.googleapis.com
+// allow_cors: false
+// - name: content-staging-library-example.googleapis.com
+// # Allows HTTP OPTIONS calls to be passed to the API frontend, for it
+// # to decide whether the subsequent cross-origin request is allowed
+// # to proceed.
+// allow_cors: true
+message Endpoint {
+ // The canonical name of this endpoint.
+ string name = 1;
+
+  // Unimplemented. Do not use.
+ //
+ // DEPRECATED: This field is no longer supported. Instead of using aliases,
+ // please specify multiple [google.api.Endpoint][google.api.Endpoint] for each
+ // of the intended aliases.
+ //
+ // Additional names that this endpoint will be hosted on.
+ repeated string aliases = 2 [deprecated = true];
+
+  // The specification of an Internet routable address of the API frontend that
+  // will handle requests to this [API
+ // Endpoint](https://cloud.google.com/apis/design/glossary). It should be
+ // either a valid IPv4 address or a fully-qualified domain name. For example,
+ // "8.8.8.8" or "myservice.appspot.com".
+ string target = 101;
+
+ // Allowing
+ // [CORS](https://en.wikipedia.org/wiki/Cross-origin_resource_sharing), aka
+ // cross-domain traffic, would allow the backends served from this endpoint to
+ // receive and respond to HTTP OPTIONS requests. The response will be used by
+ // the browser to determine whether the subsequent cross-origin request is
+ // allowed to proceed.
+ bool allow_cors = 5;
+}
diff --git a/Lib/site-packages/google/api/endpoint_pb2.py b/Lib/site-packages/google/api/endpoint_pb2.py
new file mode 100644
index 0000000..3d46005
--- /dev/null
+++ b/Lib/site-packages/google/api/endpoint_pb2.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/endpoint.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x19google/api/endpoint.proto\x12\ngoogle.api"Q\n\x08\x45ndpoint\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x07\x61liases\x18\x02 \x03(\tB\x02\x18\x01\x12\x0e\n\x06target\x18\x65 \x01(\t\x12\x12\n\nallow_cors\x18\x05 \x01(\x08\x42o\n\x0e\x63om.google.apiB\rEndpointProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_ENDPOINT = DESCRIPTOR.message_types_by_name["Endpoint"]
+Endpoint = _reflection.GeneratedProtocolMessageType(
+ "Endpoint",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ENDPOINT,
+ "__module__": "google.api.endpoint_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Endpoint)
+ },
+)
+_sym_db.RegisterMessage(Endpoint)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\rEndpointProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\242\002\004GAPI"
+ _ENDPOINT.fields_by_name["aliases"]._options = None
+ _ENDPOINT.fields_by_name["aliases"]._serialized_options = b"\030\001"
+ _ENDPOINT._serialized_start = 41
+ _ENDPOINT._serialized_end = 122
+# @@protoc_insertion_point(module_scope)
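Since `Endpoint` carries no behavior of its own, the generated class is used like any other proto message. A small sketch, assuming the module is importable as `google.api.endpoint_pb2`; the endpoint name reuses the `library-example.googleapis.com` example from the proto comments:

```python
from google.api import endpoint_pb2

# Declare an endpoint that accepts cross-origin (CORS) preflight requests.
ep = endpoint_pb2.Endpoint(
    name="library-example.googleapis.com",
    allow_cors=True,
)

# Wire round-trip through the generated bindings.
data = ep.SerializeToString()
parsed = endpoint_pb2.Endpoint.FromString(data)
assert parsed.name == ep.name and parsed.allow_cors
```

Note that the deprecated `aliases` field is intentionally left unset here; the proto comment directs callers to declare one `Endpoint` per alias instead.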
diff --git a/Lib/site-packages/google/api/error_reason.proto b/Lib/site-packages/google/api/error_reason.proto
new file mode 100644
index 0000000..c0509be
--- /dev/null
+++ b/Lib/site-packages/google/api/error_reason.proto
@@ -0,0 +1,570 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+option go_package = "google.golang.org/genproto/googleapis/api/error_reason;error_reason";
+option java_multiple_files = true;
+option java_outer_classname = "ErrorReasonProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// Defines the supported values for `google.rpc.ErrorInfo.reason` for the
+// `googleapis.com` error domain. This error domain is reserved for [Service
+// Infrastructure](https://cloud.google.com/service-infrastructure/docs/overview).
+// For each error info of this domain, the metadata key "service" refers to the
+// logical identifier of an API service, such as "pubsub.googleapis.com". The
+// "consumer" refers to the entity that consumes an API Service. It typically is
+// a Google project that owns the client application or the server resource,
+// such as "projects/123". Other metadata keys are specific to each error
+// reason. For more information, see the definition of the specific error
+// reason.
+enum ErrorReason {
+ // Do not use this default value.
+ ERROR_REASON_UNSPECIFIED = 0;
+
+ // The request is calling a disabled service for a consumer.
+ //
+ // Example of an ErrorInfo when the consumer "projects/123" contacting
+ // "pubsub.googleapis.com" service which is disabled:
+ //
+ // { "reason": "SERVICE_DISABLED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "pubsub.googleapis.com"
+ // }
+ // }
+ //
+  // This response indicates that the "pubsub.googleapis.com" service has been
+  // disabled in "projects/123".
+ SERVICE_DISABLED = 1;
+
+  // The request is denied because the associated billing account is disabled.
+ //
+ // Example of an ErrorInfo when the consumer "projects/123" fails to contact
+ // "pubsub.googleapis.com" service because the associated billing account is
+ // disabled:
+ //
+ // { "reason": "BILLING_DISABLED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "pubsub.googleapis.com"
+ // }
+ // }
+ //
+  // This response indicates that the associated billing account has been disabled.
+ BILLING_DISABLED = 2;
+
+ // The request is denied because the provided [API
+ // key](https://cloud.google.com/docs/authentication/api-keys) is invalid. It
+  // may be in a bad format, cannot be found, or has expired.
+ //
+ // Example of an ErrorInfo when the request is contacting
+ // "storage.googleapis.com" service with an invalid API key:
+ //
+ // { "reason": "API_KEY_INVALID",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "service": "storage.googleapis.com",
+ // }
+ // }
+ API_KEY_INVALID = 3;
+
+ // The request is denied because it violates [API key API
+ // restrictions](https://cloud.google.com/docs/authentication/api-keys#adding_api_restrictions).
+ //
+ // Example of an ErrorInfo when the consumer "projects/123" fails to call the
+ // "storage.googleapis.com" service because this service is restricted in the
+ // API key:
+ //
+ // { "reason": "API_KEY_SERVICE_BLOCKED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "storage.googleapis.com"
+ // }
+ // }
+ API_KEY_SERVICE_BLOCKED = 4;
+
+ // The request is denied because it violates [API key HTTP
+ // restrictions](https://cloud.google.com/docs/authentication/api-keys#adding_http_restrictions).
+ //
+ // Example of an ErrorInfo when the consumer "projects/123" fails to call
+ // "storage.googleapis.com" service because the http referrer of the request
+ // violates API key HTTP restrictions:
+ //
+ // { "reason": "API_KEY_HTTP_REFERRER_BLOCKED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "storage.googleapis.com",
+ // }
+ // }
+ API_KEY_HTTP_REFERRER_BLOCKED = 7;
+
+ // The request is denied because it violates [API key IP address
+ // restrictions](https://cloud.google.com/docs/authentication/api-keys#adding_application_restrictions).
+ //
+ // Example of an ErrorInfo when the consumer "projects/123" fails to call
+ // "storage.googleapis.com" service because the caller IP of the request
+ // violates API key IP address restrictions:
+ //
+ // { "reason": "API_KEY_IP_ADDRESS_BLOCKED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "storage.googleapis.com",
+ // }
+ // }
+ API_KEY_IP_ADDRESS_BLOCKED = 8;
+
+ // The request is denied because it violates [API key Android application
+ // restrictions](https://cloud.google.com/docs/authentication/api-keys#adding_application_restrictions).
+ //
+ // Example of an ErrorInfo when the consumer "projects/123" fails to call
+ // "storage.googleapis.com" service because the request from the Android apps
+ // violates the API key Android application restrictions:
+ //
+ // { "reason": "API_KEY_ANDROID_APP_BLOCKED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "storage.googleapis.com"
+ // }
+ // }
+ API_KEY_ANDROID_APP_BLOCKED = 9;
+
+ // The request is denied because it violates [API key iOS application
+ // restrictions](https://cloud.google.com/docs/authentication/api-keys#adding_application_restrictions).
+ //
+ // Example of an ErrorInfo when the consumer "projects/123" fails to call
+ // "storage.googleapis.com" service because the request from the iOS apps
+ // violates the API key iOS application restrictions:
+ //
+ // { "reason": "API_KEY_IOS_APP_BLOCKED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "storage.googleapis.com"
+ // }
+ // }
+ API_KEY_IOS_APP_BLOCKED = 13;
+
+ // The request is denied because there is not enough rate quota for the
+ // consumer.
+ //
+ // Example of an ErrorInfo when the consumer "projects/123" fails to contact
+ // "pubsub.googleapis.com" service because consumer's rate quota usage has
+ // reached the maximum value set for the quota limit
+ // "ReadsPerMinutePerProject" on the quota metric
+ // "pubsub.googleapis.com/read_requests":
+ //
+ // { "reason": "RATE_LIMIT_EXCEEDED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "pubsub.googleapis.com",
+ // "quota_metric": "pubsub.googleapis.com/read_requests",
+ // "quota_limit": "ReadsPerMinutePerProject"
+ // }
+ // }
+ //
+ // Example of an ErrorInfo when the consumer "projects/123" checks quota on
+ // the service "dataflow.googleapis.com" and hits the organization quota
+ // limit "DefaultRequestsPerMinutePerOrganization" on the metric
+ // "dataflow.googleapis.com/default_requests".
+ //
+ // { "reason": "RATE_LIMIT_EXCEEDED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "dataflow.googleapis.com",
+ // "quota_metric": "dataflow.googleapis.com/default_requests",
+ // "quota_limit": "DefaultRequestsPerMinutePerOrganization"
+ // }
+ // }
+ RATE_LIMIT_EXCEEDED = 5;
+
+ // The request is denied because there is not enough resource quota for the
+ // consumer.
+ //
+ // Example of an ErrorInfo when the consumer "projects/123" fails to contact
+ // "compute.googleapis.com" service because consumer's resource quota usage
+ // has reached the maximum value set for the quota limit "VMsPerProject"
+ // on the quota metric "compute.googleapis.com/vms":
+ //
+ // { "reason": "RESOURCE_QUOTA_EXCEEDED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "compute.googleapis.com",
+ // "quota_metric": "compute.googleapis.com/vms",
+ // "quota_limit": "VMsPerProject"
+ // }
+ // }
+ //
+ // Example of an ErrorInfo when the consumer "projects/123" checks resource
+ // quota on the service "dataflow.googleapis.com" and hits the organization
+ // quota limit "jobs-per-organization" on the metric
+ // "dataflow.googleapis.com/job_count".
+ //
+ // { "reason": "RESOURCE_QUOTA_EXCEEDED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "dataflow.googleapis.com",
+ // "quota_metric": "dataflow.googleapis.com/job_count",
+ // "quota_limit": "jobs-per-organization"
+ // }
+ // }
+ RESOURCE_QUOTA_EXCEEDED = 6;
+
+  // The request is denied because the address of the associated billing
+  // account is in a tax-restricted location, and creating resources in the
+  // restricted region would violate local tax restrictions.
+ //
+ // Example of an ErrorInfo when creating the Cloud Storage Bucket in the
+ // container "projects/123" under a tax restricted region
+ // "locations/asia-northeast3":
+ //
+ // { "reason": "LOCATION_TAX_POLICY_VIOLATED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "storage.googleapis.com",
+ // "location": "locations/asia-northeast3"
+ // }
+ // }
+ //
+ // This response indicates creating the Cloud Storage Bucket in
+ // "locations/asia-northeast3" violates the location tax restriction.
+ LOCATION_TAX_POLICY_VIOLATED = 10;
+
+ // The request is denied because the caller does not have required permission
+ // on the user project "projects/123" or the user project is invalid. For more
+ // information, check the [userProject System
+ // Parameters](https://cloud.google.com/apis/docs/system-parameters).
+ //
+ // Example of an ErrorInfo when the caller is calling Cloud Storage service
+ // with insufficient permissions on the user project:
+ //
+ // { "reason": "USER_PROJECT_DENIED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "storage.googleapis.com"
+ // }
+ // }
+ USER_PROJECT_DENIED = 11;
+
+ // The request is denied because the consumer "projects/123" is suspended due
+  // to Terms of Service (ToS) violations. Check [Project suspension
+ // guidelines](https://cloud.google.com/resource-manager/docs/project-suspension-guidelines)
+ // for more information.
+ //
+ // Example of an ErrorInfo when calling Cloud Storage service with the
+ // suspended consumer "projects/123":
+ //
+ // { "reason": "CONSUMER_SUSPENDED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "storage.googleapis.com"
+ // }
+ // }
+ CONSUMER_SUSPENDED = 12;
+
+  // The request is denied because the associated consumer is invalid. It may be
+  // in a bad format, may not be found, or may have been deleted.
+ //
+ // Example of an ErrorInfo when calling Cloud Storage service with the
+ // invalid consumer "projects/123":
+ //
+ // { "reason": "CONSUMER_INVALID",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "storage.googleapis.com"
+ // }
+ // }
+ CONSUMER_INVALID = 14;
+
+  // The request is denied because it violates [VPC Service
+  // Controls](https://cloud.google.com/vpc-service-controls/docs/overview).
+  // The 'uid' field is a randomly generated identifier that customers can use
+  // to search the audit log for a request rejected by VPC Service Controls. For
+  // more information, see [VPC Service Controls
+  // Troubleshooting](https://cloud.google.com/vpc-service-controls/docs/troubleshooting#unique-id).
+  //
+  // Example of an ErrorInfo when the consumer "projects/123" fails to call the
+  // Cloud Storage service because the request is prohibited by VPC Service
+  // Controls:
+ //
+ // { "reason": "SECURITY_POLICY_VIOLATED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "uid": "123456789abcde",
+ // "consumer": "projects/123",
+ // "service": "storage.googleapis.com"
+ // }
+ // }
+ SECURITY_POLICY_VIOLATED = 15;
+
+ // The request is denied because the provided access token has expired.
+ //
+ // Example of an ErrorInfo when the request is calling Cloud Storage service
+ // with an expired access token:
+ //
+ // { "reason": "ACCESS_TOKEN_EXPIRED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "service": "storage.googleapis.com",
+ // "method": "google.storage.v1.Storage.GetObject"
+ // }
+ // }
+ ACCESS_TOKEN_EXPIRED = 16;
+
+ // The request is denied because the provided access token doesn't have at
+  // least one of the acceptable scopes required for the API. See [OAuth 2.0
+  // Scopes for Google
+  // APIs](https://developers.google.com/identity/protocols/oauth2/scopes) for
+  // the list of OAuth 2.0 scopes that you might need to request to access
+ // the API.
+ //
+ // Example of an ErrorInfo when the request is calling Cloud Storage service
+ // with an access token that is missing required scopes:
+ //
+ // { "reason": "ACCESS_TOKEN_SCOPE_INSUFFICIENT",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "service": "storage.googleapis.com",
+ // "method": "google.storage.v1.Storage.GetObject"
+ // }
+ // }
+ ACCESS_TOKEN_SCOPE_INSUFFICIENT = 17;
+
+ // The request is denied because the account associated with the provided
+ // access token is in an invalid state, such as disabled or deleted.
+ // For more information, see https://cloud.google.com/docs/authentication.
+ //
+ // Warning: For privacy reasons, the server may not be able to disclose the
+ // email address for some accounts. The client MUST NOT depend on the
+ // availability of the `email` attribute.
+ //
+ // Example of an ErrorInfo when the request is to the Cloud Storage API with
+ // an access token that is associated with a disabled or deleted [service
+  // account](https://cloud.google.com/iam/docs/service-accounts):
+ //
+ // { "reason": "ACCOUNT_STATE_INVALID",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "service": "storage.googleapis.com",
+ // "method": "google.storage.v1.Storage.GetObject",
+ // "email": "user@123.iam.gserviceaccount.com"
+ // }
+ // }
+ ACCOUNT_STATE_INVALID = 18;
+
+ // The request is denied because the type of the provided access token is not
+ // supported by the API being called.
+ //
+ // Example of an ErrorInfo when the request is to the Cloud Storage API with
+  // an unsupported token type:
+ //
+ // { "reason": "ACCESS_TOKEN_TYPE_UNSUPPORTED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "service": "storage.googleapis.com",
+ // "method": "google.storage.v1.Storage.GetObject"
+ // }
+ // }
+ ACCESS_TOKEN_TYPE_UNSUPPORTED = 19;
+
+ // The request is denied because the request doesn't have any authentication
+ // credentials. For more information regarding the supported authentication
+ // strategies for Google Cloud APIs, see
+ // https://cloud.google.com/docs/authentication.
+ //
+ // Example of an ErrorInfo when the request is to the Cloud Storage API
+  // without any authentication credentials:
+ //
+ // { "reason": "CREDENTIALS_MISSING",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "service": "storage.googleapis.com",
+ // "method": "google.storage.v1.Storage.GetObject"
+ // }
+ // }
+ CREDENTIALS_MISSING = 20;
+
+  // The request is denied because the provided project owning the resource,
+  // which acts as the [API
+  // consumer](https://cloud.google.com/apis/design/glossary#api_consumer), is
+  // invalid. It may be in a bad format or empty.
+  //
+  // Example of an ErrorInfo when the request is to the Cloud Functions API,
+  // but the resource project offered in the request is in a bad format, so the
+  // ListFunctions method can't be performed:
+ //
+ // { "reason": "RESOURCE_PROJECT_INVALID",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "service": "cloudfunctions.googleapis.com",
+ // "method":
+ // "google.cloud.functions.v1.CloudFunctionsService.ListFunctions"
+ // }
+ // }
+ RESOURCE_PROJECT_INVALID = 21;
+
+  // The request is denied because the provided session cookie is missing,
+  // invalid, or can't be decoded.
+  //
+  // Example of an ErrorInfo when the request is calling the Cloud Storage
+  // service with a SID cookie that can't be decoded:
+ //
+ // { "reason": "SESSION_COOKIE_INVALID",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "service": "storage.googleapis.com",
+ // "method": "google.storage.v1.Storage.GetObject",
+ // "cookie": "SID"
+ // }
+ // }
+ SESSION_COOKIE_INVALID = 23;
+
+ // The request is denied because the user is from a Google Workspace customer
+ // that blocks their users from accessing a particular service.
+ //
+ // Example scenario: https://support.google.com/a/answer/9197205?hl=en
+ //
+ // Example of an ErrorInfo when access to Google Cloud Storage service is
+ // blocked by the Google Workspace administrator:
+ //
+ // { "reason": "USER_BLOCKED_BY_ADMIN",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "service": "storage.googleapis.com",
+ // "method": "google.storage.v1.Storage.GetObject",
+ // }
+ // }
+ USER_BLOCKED_BY_ADMIN = 24;
+
+ // The request is denied because the resource service usage is restricted
+ // by administrators according to the organization policy constraint.
+  // For more information, see
+ // https://cloud.google.com/resource-manager/docs/organization-policy/restricting-services.
+ //
+ // Example of an ErrorInfo when access to Google Cloud Storage service is
+ // restricted by Resource Usage Restriction policy:
+ //
+ // { "reason": "RESOURCE_USAGE_RESTRICTION_VIOLATED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/project-123",
+ // "service": "storage.googleapis.com"
+ // }
+ // }
+ RESOURCE_USAGE_RESTRICTION_VIOLATED = 25;
+
+ // Unimplemented. Do not use.
+ //
+ // The request is denied because it contains unsupported system parameters in
+ // URL query parameters or HTTP headers. For more information,
+ // see https://cloud.google.com/apis/docs/system-parameters
+ //
+  // Example of an ErrorInfo when accessing the "pubsub.googleapis.com" service
+  // with a request header of "x-goog-user-ip":
+ //
+ // { "reason": "SYSTEM_PARAMETER_UNSUPPORTED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+  //     "service": "pubsub.googleapis.com",
+  //     "parameter": "x-goog-user-ip"
+  //   }
+ // }
+ SYSTEM_PARAMETER_UNSUPPORTED = 26;
+
+  // The request is denied because it violates Org Restriction: the requested
+  // resource does not belong to the allowed organizations specified in the
+  // "X-Goog-Allowed-Resources" header.
+  //
+  // Example of an ErrorInfo when accessing a GCP resource that is restricted by
+  // Org Restriction for the "pubsub.googleapis.com" service:
+  //
+  // { "reason": "ORG_RESTRICTION_VIOLATION",
+  //   "domain": "googleapis.com",
+  //   "metadata": {
+  //     "consumer": "projects/123456",
+  //     "service": "pubsub.googleapis.com"
+  //   }
+  // }
+ ORG_RESTRICTION_VIOLATION = 27;
+
+ // The request is denied because "X-Goog-Allowed-Resources" header is in a bad
+ // format.
+ //
+ // Example of an ErrorInfo when
+ // accessing "pubsub.googleapis.com" service with an invalid
+ // "X-Goog-Allowed-Resources" request header.
+ //
+ // {
+ // reason: "ORG_RESTRICTION_HEADER_INVALID"
+ // domain: "googleapis.com"
+ // metadata {
+ // "consumer":"projects/123456"
+ // "service": "pubsub.googleapis.com"
+ // }
+ // }
+ ORG_RESTRICTION_HEADER_INVALID = 28;
+
+ // Unimplemented. Do not use.
+ //
+ // The request is calling a service that is not visible to the consumer.
+ //
+  // Example of an ErrorInfo when the consumer "projects/123" contacts the
+  // "pubsub.googleapis.com" service, which is not visible to the consumer:
+ //
+ // { "reason": "SERVICE_NOT_VISIBLE",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "pubsub.googleapis.com"
+ // }
+ // }
+ //
+  // This response indicates that the "pubsub.googleapis.com" service is not
+  // visible to "projects/123" (or it may not exist).
+ SERVICE_NOT_VISIBLE = 29;
+
+ // The request is related to a project for which GCP access is suspended.
+ //
+ // Example of an ErrorInfo when the consumer "projects/123" fails to contact
+ // "pubsub.googleapis.com" service because GCP access is suspended:
+ //
+ // { "reason": "GCP_SUSPENDED",
+ // "domain": "googleapis.com",
+ // "metadata": {
+ // "consumer": "projects/123",
+ // "service": "pubsub.googleapis.com"
+ // }
+ // }
+ //
+ // This response indicates the associated GCP account has been suspended.
+ GCP_SUSPENDED = 30;
+}
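These `ErrorReason` values reach clients as the `reason` field of a `google.rpc.ErrorInfo` detail attached to the error status, as the JSON examples in the comments show. As a minimal sketch (not part of this diff, and assuming the `grpcio-status` helper package alongside the vendored `google.rpc` protos), a client could recover the reason from a failed gRPC call roughly like this:

```python
# Sketch only: map a failed gRPC call back to an ErrorReason name.
from typing import Optional

import grpc
from grpc_status import rpc_status            # provided by grpcio-status
from google.rpc import error_details_pb2      # vendored common protos

def error_reason_from_rpc_error(rpc_error: grpc.RpcError) -> Optional[str]:
    """Return the ErrorReason name (e.g. "RATE_LIMIT_EXCEEDED"), if any."""
    status = rpc_status.from_call(rpc_error)  # google.rpc.status_pb2.Status
    if status is None:
        return None
    for detail in status.details:             # each is a google.protobuf.Any
        if detail.Is(error_details_pb2.ErrorInfo.DESCRIPTOR):
            info = error_details_pb2.ErrorInfo()
            detail.Unpack(info)
            if info.domain == "googleapis.com":
                return info.reason            # matches an ErrorReason name
    return None
```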
diff --git a/Lib/site-packages/google/api/error_reason_pb2.py b/Lib/site-packages/google/api/error_reason_pb2.py
new file mode 100644
index 0000000..17f04a4
--- /dev/null
+++ b/Lib/site-packages/google/api/error_reason_pb2.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/error_reason.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b"\n\x1dgoogle/api/error_reason.proto\x12\ngoogle.api*\xec\x06\n\x0b\x45rrorReason\x12\x1c\n\x18\x45RROR_REASON_UNSPECIFIED\x10\x00\x12\x14\n\x10SERVICE_DISABLED\x10\x01\x12\x14\n\x10\x42ILLING_DISABLED\x10\x02\x12\x13\n\x0f\x41PI_KEY_INVALID\x10\x03\x12\x1b\n\x17\x41PI_KEY_SERVICE_BLOCKED\x10\x04\x12!\n\x1d\x41PI_KEY_HTTP_REFERRER_BLOCKED\x10\x07\x12\x1e\n\x1a\x41PI_KEY_IP_ADDRESS_BLOCKED\x10\x08\x12\x1f\n\x1b\x41PI_KEY_ANDROID_APP_BLOCKED\x10\t\x12\x1b\n\x17\x41PI_KEY_IOS_APP_BLOCKED\x10\r\x12\x17\n\x13RATE_LIMIT_EXCEEDED\x10\x05\x12\x1b\n\x17RESOURCE_QUOTA_EXCEEDED\x10\x06\x12 \n\x1cLOCATION_TAX_POLICY_VIOLATED\x10\n\x12\x17\n\x13USER_PROJECT_DENIED\x10\x0b\x12\x16\n\x12\x43ONSUMER_SUSPENDED\x10\x0c\x12\x14\n\x10\x43ONSUMER_INVALID\x10\x0e\x12\x1c\n\x18SECURITY_POLICY_VIOLATED\x10\x0f\x12\x18\n\x14\x41\x43\x43\x45SS_TOKEN_EXPIRED\x10\x10\x12#\n\x1f\x41\x43\x43\x45SS_TOKEN_SCOPE_INSUFFICIENT\x10\x11\x12\x19\n\x15\x41\x43\x43OUNT_STATE_INVALID\x10\x12\x12!\n\x1d\x41\x43\x43\x45SS_TOKEN_TYPE_UNSUPPORTED\x10\x13\x12\x17\n\x13\x43REDENTIALS_MISSING\x10\x14\x12\x1c\n\x18RESOURCE_PROJECT_INVALID\x10\x15\x12\x1a\n\x16SESSION_COOKIE_INVALID\x10\x17\x12\x19\n\x15USER_BLOCKED_BY_ADMIN\x10\x18\x12'\n#RESOURCE_USAGE_RESTRICTION_VIOLATED\x10\x19\x12 \n\x1cSYSTEM_PARAMETER_UNSUPPORTED\x10\x1a\x12\x1d\n\x19ORG_RESTRICTION_VIOLATION\x10\x1b\x12\"\n\x1eORG_RESTRICTION_HEADER_INVALID\x10\x1c\x12\x17\n\x13SERVICE_NOT_VISIBLE\x10\x1d\x12\x11\n\rGCP_SUSPENDED\x10\x1e\x42p\n\x0e\x63om.google.apiB\x10\x45rrorReasonProtoP\x01ZCgoogle.golang.org/genproto/googleapis/api/error_reason;error_reason\xa2\x02\x04GAPIb\x06proto3"
+)
+
+_ERRORREASON = DESCRIPTOR.enum_types_by_name["ErrorReason"]
+ErrorReason = enum_type_wrapper.EnumTypeWrapper(_ERRORREASON)
+ERROR_REASON_UNSPECIFIED = 0
+SERVICE_DISABLED = 1
+BILLING_DISABLED = 2
+API_KEY_INVALID = 3
+API_KEY_SERVICE_BLOCKED = 4
+API_KEY_HTTP_REFERRER_BLOCKED = 7
+API_KEY_IP_ADDRESS_BLOCKED = 8
+API_KEY_ANDROID_APP_BLOCKED = 9
+API_KEY_IOS_APP_BLOCKED = 13
+RATE_LIMIT_EXCEEDED = 5
+RESOURCE_QUOTA_EXCEEDED = 6
+LOCATION_TAX_POLICY_VIOLATED = 10
+USER_PROJECT_DENIED = 11
+CONSUMER_SUSPENDED = 12
+CONSUMER_INVALID = 14
+SECURITY_POLICY_VIOLATED = 15
+ACCESS_TOKEN_EXPIRED = 16
+ACCESS_TOKEN_SCOPE_INSUFFICIENT = 17
+ACCOUNT_STATE_INVALID = 18
+ACCESS_TOKEN_TYPE_UNSUPPORTED = 19
+CREDENTIALS_MISSING = 20
+RESOURCE_PROJECT_INVALID = 21
+SESSION_COOKIE_INVALID = 23
+USER_BLOCKED_BY_ADMIN = 24
+RESOURCE_USAGE_RESTRICTION_VIOLATED = 25
+SYSTEM_PARAMETER_UNSUPPORTED = 26
+ORG_RESTRICTION_VIOLATION = 27
+ORG_RESTRICTION_HEADER_INVALID = 28
+SERVICE_NOT_VISIBLE = 29
+GCP_SUSPENDED = 30
+
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\020ErrorReasonProtoP\001ZCgoogle.golang.org/genproto/googleapis/api/error_reason;error_reason\242\002\004GAPI"
+ _ERRORREASON._serialized_start = 46
+ _ERRORREASON._serialized_end = 922
+# @@protoc_insertion_point(module_scope)
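Assuming the module above is importable as `google.api.error_reason_pb2`, its `EnumTypeWrapper` provides name/value lookups between the `reason` strings carried in `ErrorInfo` and the numeric enum values; a quick usage sketch:

```python
from google.api import error_reason_pb2

# String <-> number lookups via the EnumTypeWrapper.
assert error_reason_pb2.ErrorReason.Value("RATE_LIMIT_EXCEEDED") == 5
assert error_reason_pb2.ErrorReason.Name(5) == "RATE_LIMIT_EXCEEDED"
# The values are also re-exported as module-level constants.
assert error_reason_pb2.RATE_LIMIT_EXCEEDED == 5
```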
diff --git a/Lib/site-packages/google/api/field_behavior.proto b/Lib/site-packages/google/api/field_behavior.proto
new file mode 100644
index 0000000..344cb0b
--- /dev/null
+++ b/Lib/site-packages/google/api/field_behavior.proto
@@ -0,0 +1,104 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/protobuf/descriptor.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
+option java_multiple_files = true;
+option java_outer_classname = "FieldBehaviorProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+extend google.protobuf.FieldOptions {
+ // A designation of a specific field behavior (required, output only, etc.)
+ // in protobuf messages.
+ //
+ // Examples:
+ //
+ // string name = 1 [(google.api.field_behavior) = REQUIRED];
+ // State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
+ // google.protobuf.Duration ttl = 1
+ // [(google.api.field_behavior) = INPUT_ONLY];
+ // google.protobuf.Timestamp expire_time = 1
+ // [(google.api.field_behavior) = OUTPUT_ONLY,
+ // (google.api.field_behavior) = IMMUTABLE];
+ repeated google.api.FieldBehavior field_behavior = 1052;
+}
+
+// An indicator of the behavior of a given field (for example, that a field
+// is required in requests, or given as output but ignored as input).
+// This **does not** change the behavior in protocol buffers itself; it only
+// denotes the behavior and may affect how API tooling handles the field.
+//
+// Note: This enum **may** receive new values in the future.
+enum FieldBehavior {
+ // Conventional default for enums. Do not use this.
+ FIELD_BEHAVIOR_UNSPECIFIED = 0;
+
+ // Specifically denotes a field as optional.
+ // While all fields in protocol buffers are optional, this may be specified
+ // for emphasis if appropriate.
+ OPTIONAL = 1;
+
+ // Denotes a field as required.
+ // This indicates that the field **must** be provided as part of the request,
+ // and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+ REQUIRED = 2;
+
+ // Denotes a field as output only.
+ // This indicates that the field is provided in responses, but including the
+ // field in a request does nothing (the server *must* ignore it and
+ // *must not* throw an error as a result of the field's presence).
+ OUTPUT_ONLY = 3;
+
+ // Denotes a field as input only.
+ // This indicates that the field is provided in requests, and the
+ // corresponding field is not included in output.
+ INPUT_ONLY = 4;
+
+ // Denotes a field as immutable.
+ // This indicates that the field may be set once in a request to create a
+ // resource, but may not be changed thereafter.
+ IMMUTABLE = 5;
+
+ // Denotes that a (repeated) field is an unordered list.
+ // This indicates that the service may provide the elements of the list
+ // in any arbitrary order, rather than the order the user originally
+ // provided. Additionally, the list's order may or may not be stable.
+ UNORDERED_LIST = 6;
+
+ // Denotes that this field returns a non-empty default value if not set.
+ // This indicates that if the user provides the empty value in a request,
+ // a non-empty value will be returned. The user will not be aware of what
+ // non-empty value to expect.
+ NON_EMPTY_DEFAULT = 7;
+
+ // Denotes that the field in a resource (a message annotated with
+ // google.api.resource) is used in the resource name to uniquely identify the
+ // resource. For AIP-compliant APIs, this should only be applied to the
+ // `name` field on the resource.
+ //
+ // This behavior should not be applied to references to other resources within
+ // the message.
+ //
+  // The identifier field of resources often has different field behavior
+  // depending on the request it is embedded in (e.g., for Create methods the
+  // name is optional and unused, while for Update methods it is required).
+  // Instead of method-specific annotations, only `IDENTIFIER` is required.
+ IDENTIFIER = 8;
+}
diff --git a/Lib/site-packages/google/api/field_behavior_pb2.py b/Lib/site-packages/google/api/field_behavior_pb2.py
new file mode 100644
index 0000000..bf4ca63
--- /dev/null
+++ b/Lib/site-packages/google/api/field_behavior_pb2.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/field_behavior.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b"\n\x1fgoogle/api/field_behavior.proto\x12\ngoogle.api\x1a google/protobuf/descriptor.proto*\xb6\x01\n\rFieldBehavior\x12\x1e\n\x1a\x46IELD_BEHAVIOR_UNSPECIFIED\x10\x00\x12\x0c\n\x08OPTIONAL\x10\x01\x12\x0c\n\x08REQUIRED\x10\x02\x12\x0f\n\x0bOUTPUT_ONLY\x10\x03\x12\x0e\n\nINPUT_ONLY\x10\x04\x12\r\n\tIMMUTABLE\x10\x05\x12\x12\n\x0eUNORDERED_LIST\x10\x06\x12\x15\n\x11NON_EMPTY_DEFAULT\x10\x07\x12\x0e\n\nIDENTIFIER\x10\x08:Q\n\x0e\x66ield_behavior\x12\x1d.google.protobuf.FieldOptions\x18\x9c\x08 \x03(\x0e\x32\x19.google.api.FieldBehaviorBp\n\x0e\x63om.google.apiB\x12\x46ieldBehaviorProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xa2\x02\x04GAPIb\x06proto3"
+)
+
+_FIELDBEHAVIOR = DESCRIPTOR.enum_types_by_name["FieldBehavior"]
+FieldBehavior = enum_type_wrapper.EnumTypeWrapper(_FIELDBEHAVIOR)
+FIELD_BEHAVIOR_UNSPECIFIED = 0
+OPTIONAL = 1
+REQUIRED = 2
+OUTPUT_ONLY = 3
+INPUT_ONLY = 4
+IMMUTABLE = 5
+UNORDERED_LIST = 6
+NON_EMPTY_DEFAULT = 7
+IDENTIFIER = 8
+
+FIELD_BEHAVIOR_FIELD_NUMBER = 1052
+field_behavior = DESCRIPTOR.extensions_by_name["field_behavior"]
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+ google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(
+ field_behavior
+ )
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\022FieldBehaviorProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\242\002\004GAPI"
+ _FIELDBEHAVIOR._serialized_start = 82
+ _FIELDBEHAVIOR._serialized_end = 264
+# @@protoc_insertion_point(module_scope)
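One common runtime use of this generated module is reading the `field_behavior` annotations back off a message's field descriptors. A hedged sketch follows; `my_service_pb2.Book` stands in for any generated message whose `.proto` used the annotation and is hypothetical:

```python
from google.api import field_behavior_pb2

def required_fields(message_descriptor):
    """Yield names of fields annotated (google.api.field_behavior) = REQUIRED."""
    for field in message_descriptor.fields:
        # Extensions[...] yields the repeated FieldBehavior values, if any.
        behaviors = field.GetOptions().Extensions[field_behavior_pb2.field_behavior]
        if field_behavior_pb2.REQUIRED in behaviors:
            yield field.name

# Usage against a hypothetical annotated message:
#   list(required_fields(my_service_pb2.Book.DESCRIPTOR))
```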
diff --git a/Lib/site-packages/google/api/field_info.proto b/Lib/site-packages/google/api/field_info.proto
new file mode 100644
index 0000000..dd66340
--- /dev/null
+++ b/Lib/site-packages/google/api/field_info.proto
@@ -0,0 +1,79 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/protobuf/descriptor.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
+option java_multiple_files = true;
+option java_outer_classname = "FieldInfoProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+extend google.protobuf.FieldOptions {
+ // Rich semantic descriptor of an API field beyond the basic typing.
+ //
+ // Examples:
+ //
+ // string request_id = 1 [(google.api.field_info).format = UUID4];
+ // string old_ip_address = 2 [(google.api.field_info).format = IPV4];
+ // string new_ip_address = 3 [(google.api.field_info).format = IPV6];
+ // string actual_ip_address = 4 [
+ // (google.api.field_info).format = IPV4_OR_IPV6
+ // ];
+ google.api.FieldInfo field_info = 291403980;
+}
+
+// Rich semantic information of an API field beyond basic typing.
+message FieldInfo {
+ // The standard format of a field value. The supported formats are all backed
+ // by either an RFC defined by the IETF or a Google-defined AIP.
+ enum Format {
+ // Default, unspecified value.
+ FORMAT_UNSPECIFIED = 0;
+
+ // Universally Unique Identifier, version 4, value as defined by
+ // https://datatracker.ietf.org/doc/html/rfc4122. The value may be
+ // normalized to entirely lowercase letters. For example, the value
+ // `F47AC10B-58CC-0372-8567-0E02B2C3D479` would be normalized to
+ // `f47ac10b-58cc-0372-8567-0e02b2c3d479`.
+ UUID4 = 1;
+
+ // Internet Protocol v4 value as defined by [RFC
+ // 791](https://datatracker.ietf.org/doc/html/rfc791). The value may be
+ // condensed, with leading zeros in each octet stripped. For example,
+ // `001.022.233.040` would be condensed to `1.22.233.40`.
+ IPV4 = 2;
+
+ // Internet Protocol v6 value as defined by [RFC
+ // 2460](https://datatracker.ietf.org/doc/html/rfc2460). The value may be
+ // normalized to entirely lowercase letters, and zero-padded partial and
+ // empty octets. For example, the value `2001:DB8::` would be normalized to
+ // `2001:0db8:0:0`.
+ IPV6 = 3;
+
+ // An IP address in either v4 or v6 format as described by the individual
+ // values defined herein. See the comments on the IPV4 and IPV6 types for
+ // allowed normalizations of each.
+ IPV4_OR_IPV6 = 4;
+ }
+
+ // The standard format of a field value. This does not explicitly configure
+ // any API consumer, just documents the API's format for the field it is
+ // applied to.
+ Format format = 1;
+}
diff --git a/Lib/site-packages/google/api/field_info_pb2.py b/Lib/site-packages/google/api/field_info_pb2.py
new file mode 100644
index 0000000..2c80b38
--- /dev/null
+++ b/Lib/site-packages/google/api/field_info_pb2.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/field_info.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x1bgoogle/api/field_info.proto\x12\ngoogle.api\x1a google/protobuf/descriptor.proto"\x8c\x01\n\tFieldInfo\x12,\n\x06\x66ormat\x18\x01 \x01(\x0e\x32\x1c.google.api.FieldInfo.Format"Q\n\x06\x46ormat\x12\x16\n\x12\x46ORMAT_UNSPECIFIED\x10\x00\x12\t\n\x05UUID4\x10\x01\x12\x08\n\x04IPV4\x10\x02\x12\x08\n\x04IPV6\x10\x03\x12\x10\n\x0cIPV4_OR_IPV6\x10\x04:L\n\nfield_info\x12\x1d.google.protobuf.FieldOptions\x18\xcc\xf1\xf9\x8a\x01 \x01(\x0b\x32\x15.google.api.FieldInfoBl\n\x0e\x63om.google.apiB\x0e\x46ieldInfoProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+FIELD_INFO_FIELD_NUMBER = 291403980
+field_info = DESCRIPTOR.extensions_by_name["field_info"]
+
+_FIELDINFO = DESCRIPTOR.message_types_by_name["FieldInfo"]
+_FIELDINFO_FORMAT = _FIELDINFO.enum_types_by_name["Format"]
+FieldInfo = _reflection.GeneratedProtocolMessageType(
+ "FieldInfo",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _FIELDINFO,
+ "__module__": "google.api.field_info_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.FieldInfo)
+ },
+)
+_sym_db.RegisterMessage(FieldInfo)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+ google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(field_info)
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\016FieldInfoProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\242\002\004GAPI"
+ _FIELDINFO._serialized_start = 78
+ _FIELDINFO._serialized_end = 218
+ _FIELDINFO_FORMAT._serialized_start = 137
+ _FIELDINFO_FORMAT._serialized_end = 218
+# @@protoc_insertion_point(module_scope)
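The same descriptor-options pattern works for `field_info`. Since this extension is a singular message rather than a repeated enum, the sketch below (again against a hypothetical annotated field descriptor) checks for its presence before reading the declared format:

```python
from google.api import field_info_pb2

def declared_format(field_descriptor):
    """Return the FieldInfo.Format name declared for a field, or None."""
    opts = field_descriptor.GetOptions()
    if not opts.HasExtension(field_info_pb2.field_info):
        return None
    info = opts.Extensions[field_info_pb2.field_info]
    return field_info_pb2.FieldInfo.Format.Name(info.format)  # e.g. "UUID4"
```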
diff --git a/Lib/site-packages/google/api/http.proto b/Lib/site-packages/google/api/http.proto
new file mode 100644
index 0000000..31d867a
--- /dev/null
+++ b/Lib/site-packages/google/api/http.proto
@@ -0,0 +1,379 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
+option java_multiple_files = true;
+option java_outer_classname = "HttpProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// Defines the HTTP configuration for an API service. It contains a list of
+// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method
+// to one or more HTTP REST API methods.
+message Http {
+ // A list of HTTP configuration rules that apply to individual API methods.
+ //
+ // **NOTE:** All service configuration rules follow "last one wins" order.
+ repeated HttpRule rules = 1;
+
+ // When set to true, URL path parameters will be fully URI-decoded except in
+ // cases of single segment matches in reserved expansion, where "%2F" will be
+ // left encoded.
+ //
+ // The default behavior is to not decode RFC 6570 reserved characters in multi
+ // segment matches.
+ bool fully_decode_reserved_expansion = 2;
+}
+
+// # gRPC Transcoding
+//
+// gRPC Transcoding is a feature for mapping between a gRPC method and one or
+// more HTTP REST endpoints. It allows developers to build a single API service
+// that supports both gRPC APIs and REST APIs. Many systems, including [Google
+// APIs](https://github.com/googleapis/googleapis),
+// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC
+// Gateway](https://github.com/grpc-ecosystem/grpc-gateway),
+// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature
+// and use it for large scale production services.
+//
+// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies
+// how different portions of the gRPC request message are mapped to the URL
+// path, URL query parameters, and HTTP request body. It also controls how the
+// gRPC response message is mapped to the HTTP response body. `HttpRule` is
+// typically specified as an `google.api.http` annotation on the gRPC method.
+//
+// Each mapping specifies a URL path template and an HTTP method. The path
+// template may refer to one or more fields in the gRPC request message, as long
+// as each field is a non-repeated field with a primitive (non-message) type.
+// The path template controls how fields of the request message are mapped to
+// the URL path.
+//
+// Example:
+//
+// service Messaging {
+// rpc GetMessage(GetMessageRequest) returns (Message) {
+// option (google.api.http) = {
+// get: "/v1/{name=messages/*}"
+// };
+// }
+// }
+// message GetMessageRequest {
+// string name = 1; // Mapped to URL path.
+// }
+// message Message {
+// string text = 1; // The resource content.
+// }
+//
+// This enables an HTTP REST to gRPC mapping as below:
+//
+// HTTP | gRPC
+// -----|-----
+// `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")`
+//
+// Any fields in the request message which are not bound by the path template
+// automatically become HTTP query parameters if there is no HTTP request body.
+// For example:
+//
+// service Messaging {
+// rpc GetMessage(GetMessageRequest) returns (Message) {
+// option (google.api.http) = {
+// get:"/v1/messages/{message_id}"
+// };
+// }
+// }
+// message GetMessageRequest {
+// message SubMessage {
+// string subfield = 1;
+// }
+// string message_id = 1; // Mapped to URL path.
+// int64 revision = 2; // Mapped to URL query parameter `revision`.
+// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`.
+// }
+//
+// This enables an HTTP JSON to RPC mapping as below:
+//
+// HTTP | gRPC
+// -----|-----
+// `GET /v1/messages/123456?revision=2&sub.subfield=foo` |
+// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield:
+// "foo"))`
+//
+// Note that fields which are mapped to URL query parameters must have a
+// primitive type, a repeated primitive type, or a non-repeated message type.
+// In the case of a repeated type, the parameter can be repeated in the URL
+// as `...?param=A&param=B`. In the case of a message type, each field of the
+// message is mapped to a separate parameter, such as
+// `...?foo.a=A&foo.b=B&foo.c=C`.
+//
+// For HTTP methods that allow a request body, the `body` field
+// specifies the mapping. Consider a REST update method on the
+// message resource collection:
+//
+// service Messaging {
+// rpc UpdateMessage(UpdateMessageRequest) returns (Message) {
+// option (google.api.http) = {
+// patch: "/v1/messages/{message_id}"
+// body: "message"
+// };
+// }
+// }
+// message UpdateMessageRequest {
+// string message_id = 1; // mapped to the URL
+// Message message = 2; // mapped to the body
+// }
+//
+// The following HTTP JSON to RPC mapping is enabled, where the
+// representation of the JSON in the request body is determined by
+// the proto3 JSON encoding:
+//
+// HTTP | gRPC
+// -----|-----
+// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id:
+// "123456" message { text: "Hi!" })`
+//
+// The special name `*` can be used in the body mapping to define that
+// every field not bound by the path template should be mapped to the
+// request body. This enables the following alternative definition of
+// the update method:
+//
+// service Messaging {
+// rpc UpdateMessage(Message) returns (Message) {
+// option (google.api.http) = {
+// patch: "/v1/messages/{message_id}"
+// body: "*"
+// };
+// }
+// }
+// message Message {
+// string message_id = 1;
+// string text = 2;
+// }
+//
+//
+// The following HTTP JSON to RPC mapping is enabled:
+//
+// HTTP | gRPC
+// -----|-----
+// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id:
+// "123456" text: "Hi!")`
+//
+// Note that when using `*` in the body mapping, it is not possible to
+// have HTTP parameters, as all fields not bound by the path end in
+// the body. This makes this option rarely used in practice when
+// defining REST APIs. The common usage of `*` is in custom methods
+// which don't use the URL at all for transferring data.
+//
+// It is possible to define multiple HTTP methods for one RPC by using
+// the `additional_bindings` option. Example:
+//
+// service Messaging {
+// rpc GetMessage(GetMessageRequest) returns (Message) {
+// option (google.api.http) = {
+// get: "/v1/messages/{message_id}"
+// additional_bindings {
+// get: "/v1/users/{user_id}/messages/{message_id}"
+// }
+// };
+// }
+// }
+// message GetMessageRequest {
+// string message_id = 1;
+// string user_id = 2;
+// }
+//
+// This enables the following two alternative HTTP JSON to RPC mappings:
+//
+// HTTP | gRPC
+// -----|-----
+// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")`
+// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id:
+// "123456")`
+//
+// ## Rules for HTTP mapping
+//
+// 1. Leaf request fields (recursive expansion of nested messages in the
+//    request message) are classified into three categories:
+// - Fields referred by the path template. They are passed via the URL path.
+// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They
+// are passed via the HTTP
+// request body.
+// - All other fields are passed via the URL query parameters, and the
+// parameter name is the field path in the request message. A repeated
+// field can be represented as multiple query parameters under the same
+// name.
+// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there are no URL
+//    query parameters; all fields are passed via the URL path and the HTTP
+//    request body.
+// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP
+//    request body; all fields are passed via the URL path and URL query
+//    parameters.
+//
+// ### Path template syntax
+//
+// Template = "/" Segments [ Verb ] ;
+// Segments = Segment { "/" Segment } ;
+// Segment = "*" | "**" | LITERAL | Variable ;
+// Variable = "{" FieldPath [ "=" Segments ] "}" ;
+// FieldPath = IDENT { "." IDENT } ;
+// Verb = ":" LITERAL ;
+//
+// The syntax `*` matches a single URL path segment. The syntax `**` matches
+// zero or more URL path segments, which must be the last part of the URL path
+// except the `Verb`.
+//
+// The syntax `Variable` matches part of the URL path as specified by its
+// template. A variable template must not contain other variables. If a variable
+// matches a single path segment, its template may be omitted, e.g. `{var}`
+// is equivalent to `{var=*}`.
+//
+// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL`
+// contains any reserved character, such characters should be percent-encoded
+// before the matching.
+//
+// If a variable contains exactly one path segment, such as `"{var}"` or
+// `"{var=*}"`, when such a variable is expanded into a URL path on the client
+// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The
+// server side does the reverse decoding. Such variables show up in the
+// [Discovery
+// Document](https://developers.google.com/discovery/v1/reference/apis) as
+// `{var}`.
+//
+// If a variable contains multiple path segments, such as `"{var=foo/*}"`
+// or `"{var=**}"`, when such a variable is expanded into a URL path on the
+// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded.
+// The server side does the reverse decoding, except "%2F" and "%2f" are left
+// unchanged. Such variables show up in the
+// [Discovery
+// Document](https://developers.google.com/discovery/v1/reference/apis) as
+// `{+var}`.
+//
+// ## Using gRPC API Service Configuration
+//
+// gRPC API Service Configuration (service config) is a configuration language
+// for configuring a gRPC service to become a user-facing product. The
+// service config is simply the YAML representation of the `google.api.Service`
+// proto message.
+//
+// As an alternative to annotating your proto file, you can configure gRPC
+// transcoding in your service config YAML files. You do this by specifying a
+// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same
+// effect as the proto annotation. This can be particularly useful if you
+// have a proto that is reused in multiple services. Note that any transcoding
+// specified in the service config will override any matching transcoding
+// configuration in the proto.
+//
+// Example:
+//
+// http:
+// rules:
+// # Selects a gRPC method and applies HttpRule to it.
+// - selector: example.v1.Messaging.GetMessage
+// get: /v1/messages/{message_id}/{sub.subfield}
+//
+// ## Special notes
+//
+// When gRPC Transcoding is used to map a gRPC method to JSON REST endpoints, the
+// proto to JSON conversion must follow the [proto3
+// specification](https://developers.google.com/protocol-buffers/docs/proto3#json).
+//
+// While the single segment variable follows the semantics of
+// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String
+// Expansion, the multi segment variable **does not** follow RFC 6570 Section
+// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion
+// does not expand special characters like `?` and `#`, which would lead
+// to invalid URLs. As a result, gRPC Transcoding uses a custom encoding
+// for multi segment variables.
+//
+// The path variables **must not** refer to any repeated or mapped field,
+// because client libraries are not capable of handling such variable expansion.
+//
+// The path variables **must not** capture the leading "/" character. The reason
+// is that the most common use case "{var}" does not capture the leading "/"
+// character. For consistency, all path variables must share the same behavior.
+//
+// Repeated message fields must not be mapped to URL query parameters, because
+// no client library can support such complicated mapping.
+//
+// If an API needs to use a JSON array for request or response body, it can map
+// the request or response body to a repeated field. However, some gRPC
+// Transcoding implementations may not support this feature.
+message HttpRule {
+ // Selects a method to which this rule applies.
+ //
+ // Refer to [selector][google.api.DocumentationRule.selector] for syntax
+ // details.
+ string selector = 1;
+
+  // Determines the URL pattern that is matched by this rule. This pattern can
+  // be used with any of the {get|put|post|delete|patch} methods. A custom
+  // method can be defined using the 'custom' field.
+ oneof pattern {
+ // Maps to HTTP GET. Used for listing and getting information about
+ // resources.
+ string get = 2;
+
+ // Maps to HTTP PUT. Used for replacing a resource.
+ string put = 3;
+
+ // Maps to HTTP POST. Used for creating a resource or performing an action.
+ string post = 4;
+
+ // Maps to HTTP DELETE. Used for deleting a resource.
+ string delete = 5;
+
+ // Maps to HTTP PATCH. Used for updating a resource.
+ string patch = 6;
+
+ // The custom pattern is used for specifying an HTTP method that is not
+ // included in the `pattern` field, such as HEAD, or "*" to leave the
+ // HTTP method unspecified for this rule. The wild-card rule is useful
+ // for services that provide content to Web (HTML) clients.
+ CustomHttpPattern custom = 8;
+ }
+
+ // The name of the request field whose value is mapped to the HTTP request
+ // body, or `*` for mapping all request fields not captured by the path
+ // pattern to the HTTP body, or omitted for not having any HTTP request body.
+ //
+ // NOTE: the referred field must be present at the top-level of the request
+ // message type.
+ string body = 7;
+
+ // Optional. The name of the response field whose value is mapped to the HTTP
+ // response body. When omitted, the entire response message will be used
+ // as the HTTP response body.
+ //
+ // NOTE: The referred field must be present at the top-level of the response
+ // message type.
+ string response_body = 12;
+
+ // Additional HTTP bindings for the selector. Nested bindings must
+ // not contain an `additional_bindings` field themselves (that is,
+ // the nesting may only be one level deep).
+ repeated HttpRule additional_bindings = 11;
+}
+
+// A custom pattern is used for defining a custom HTTP verb.
+message CustomHttpPattern {
+ // The name of this custom HTTP verb.
+ string kind = 1;
+
+ // The path matched by this custom verb.
+ string path = 2;
+}
diff --git a/Lib/site-packages/google/api/http_pb2.py b/Lib/site-packages/google/api/http_pb2.py
new file mode 100644
index 0000000..c0d4c4c
--- /dev/null
+++ b/Lib/site-packages/google/api/http_pb2.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/http.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x15google/api/http.proto\x12\ngoogle.api"T\n\x04Http\x12#\n\x05rules\x18\x01 \x03(\x0b\x32\x14.google.api.HttpRule\x12\'\n\x1f\x66ully_decode_reserved_expansion\x18\x02 \x01(\x08"\x81\x02\n\x08HttpRule\x12\x10\n\x08selector\x18\x01 \x01(\t\x12\r\n\x03get\x18\x02 \x01(\tH\x00\x12\r\n\x03put\x18\x03 \x01(\tH\x00\x12\x0e\n\x04post\x18\x04 \x01(\tH\x00\x12\x10\n\x06\x64\x65lete\x18\x05 \x01(\tH\x00\x12\x0f\n\x05patch\x18\x06 \x01(\tH\x00\x12/\n\x06\x63ustom\x18\x08 \x01(\x0b\x32\x1d.google.api.CustomHttpPatternH\x00\x12\x0c\n\x04\x62ody\x18\x07 \x01(\t\x12\x15\n\rresponse_body\x18\x0c \x01(\t\x12\x31\n\x13\x61\x64\x64itional_bindings\x18\x0b \x03(\x0b\x32\x14.google.api.HttpRuleB\t\n\x07pattern"/\n\x11\x43ustomHttpPattern\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\tBj\n\x0e\x63om.google.apiB\tHttpProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xf8\x01\x01\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_HTTP = DESCRIPTOR.message_types_by_name["Http"]
+_HTTPRULE = DESCRIPTOR.message_types_by_name["HttpRule"]
+_CUSTOMHTTPPATTERN = DESCRIPTOR.message_types_by_name["CustomHttpPattern"]
+Http = _reflection.GeneratedProtocolMessageType(
+ "Http",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _HTTP,
+ "__module__": "google.api.http_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Http)
+ },
+)
+_sym_db.RegisterMessage(Http)
+
+HttpRule = _reflection.GeneratedProtocolMessageType(
+ "HttpRule",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _HTTPRULE,
+ "__module__": "google.api.http_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.HttpRule)
+ },
+)
+_sym_db.RegisterMessage(HttpRule)
+
+CustomHttpPattern = _reflection.GeneratedProtocolMessageType(
+ "CustomHttpPattern",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _CUSTOMHTTPPATTERN,
+ "__module__": "google.api.http_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.CustomHttpPattern)
+ },
+)
+_sym_db.RegisterMessage(CustomHttpPattern)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\tHttpProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\370\001\001\242\002\004GAPI"
+ _HTTP._serialized_start = 37
+ _HTTP._serialized_end = 121
+ _HTTPRULE._serialized_start = 124
+ _HTTPRULE._serialized_end = 381
+ _CUSTOMHTTPPATTERN._serialized_start = 383
+ _CUSTOMHTTPPATTERN._serialized_end = 430
+# @@protoc_insertion_point(module_scope)
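As an illustration of the messages this module generates, here is a minimal construction sketch mirroring the `GetMessage` rule with an additional binding from the http.proto commentary; the selector and paths are the example values from those comments, not anything this diff configures:

```python
from google.api import http_pb2

rule = http_pb2.HttpRule(
    selector="example.v1.Messaging.GetMessage",  # example values taken from
    get="/v1/messages/{message_id}",             # the http.proto comments
    additional_bindings=[
        http_pb2.HttpRule(get="/v1/users/{user_id}/messages/{message_id}"),
    ],
)
assert rule.WhichOneof("pattern") == "get"       # the chosen HTTP method
```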
diff --git a/Lib/site-packages/google/api/httpbody.proto b/Lib/site-packages/google/api/httpbody.proto
new file mode 100644
index 0000000..7f1685e
--- /dev/null
+++ b/Lib/site-packages/google/api/httpbody.proto
@@ -0,0 +1,81 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/protobuf/any.proto";
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/api/httpbody;httpbody";
+option java_multiple_files = true;
+option java_outer_classname = "HttpBodyProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// Message that represents an arbitrary HTTP body. It should only be used for
+// payload formats that can't be represented as JSON, such as raw binary or
+// an HTML page.
+//
+// This message can be used both in streaming and non-streaming API methods in
+// the request as well as the response.
+//
+// It can be used as a top-level request field, which is convenient if one
+// wants to extract parameters from either the URL or HTTP template into the
+// request fields and also wants access to the raw HTTP body.
+//
+// Example:
+//
+// message GetResourceRequest {
+// // A unique request id.
+// string request_id = 1;
+//
+// // The raw HTTP body is bound to this field.
+// google.api.HttpBody http_body = 2;
+//
+// }
+//
+// service ResourceService {
+// rpc GetResource(GetResourceRequest)
+// returns (google.api.HttpBody);
+// rpc UpdateResource(google.api.HttpBody)
+// returns (google.protobuf.Empty);
+//
+// }
+//
+// Example with streaming methods:
+//
+// service CaldavService {
+// rpc GetCalendar(stream google.api.HttpBody)
+// returns (stream google.api.HttpBody);
+// rpc UpdateCalendar(stream google.api.HttpBody)
+// returns (stream google.api.HttpBody);
+//
+// }
+//
+// Use of this type only changes how the request and response bodies are
+// handled; all other features continue to work unchanged.
+message HttpBody {
+ // The HTTP Content-Type header value specifying the content type of the body.
+ string content_type = 1;
+
+ // The HTTP request/response body as raw binary.
+ bytes data = 2;
+
+  // Application-specific response metadata. Must be set in the first response
+ // for streaming APIs.
+ repeated google.protobuf.Any extensions = 3;
+}
diff --git a/Lib/site-packages/google/api/httpbody_pb2.py b/Lib/site-packages/google/api/httpbody_pb2.py
new file mode 100644
index 0000000..dfb52ab
--- /dev/null
+++ b/Lib/site-packages/google/api/httpbody_pb2.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/httpbody.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x19google/api/httpbody.proto\x12\ngoogle.api\x1a\x19google/protobuf/any.proto"X\n\x08HttpBody\x12\x14\n\x0c\x63ontent_type\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12(\n\nextensions\x18\x03 \x03(\x0b\x32\x14.google.protobuf.AnyBh\n\x0e\x63om.google.apiB\rHttpBodyProtoP\x01Z;google.golang.org/genproto/googleapis/api/httpbody;httpbody\xf8\x01\x01\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_HTTPBODY = DESCRIPTOR.message_types_by_name["HttpBody"]
+HttpBody = _reflection.GeneratedProtocolMessageType(
+ "HttpBody",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _HTTPBODY,
+ "__module__": "google.api.httpbody_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.HttpBody)
+ },
+)
+_sym_db.RegisterMessage(HttpBody)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\rHttpBodyProtoP\001Z;google.golang.org/genproto/googleapis/api/httpbody;httpbody\370\001\001\242\002\004GAPI"
+ _HTTPBODY._serialized_start = 68
+ _HTTPBODY._serialized_end = 156
+# @@protoc_insertion_point(module_scope)
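A construction sketch for the generated class, with an illustrative (truncated) binary payload:

```python
from google.api import httpbody_pb2

body = httpbody_pb2.HttpBody(
    content_type="image/png",     # HTTP Content-Type header value
    data=b"\x89PNG\r\n\x1a\n",    # raw binary payload (PNG magic bytes only)
)
```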
diff --git a/Lib/site-packages/google/api/label.proto b/Lib/site-packages/google/api/label.proto
new file mode 100644
index 0000000..698f6bd
--- /dev/null
+++ b/Lib/site-packages/google/api/label.proto
@@ -0,0 +1,48 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/api/label;label";
+option java_multiple_files = true;
+option java_outer_classname = "LabelProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// A description of a label.
+message LabelDescriptor {
+ // Value types that can be used as label values.
+ enum ValueType {
+ // A variable-length string. This is the default.
+ STRING = 0;
+
+ // Boolean; true or false.
+ BOOL = 1;
+
+ // A 64-bit signed integer.
+ INT64 = 2;
+ }
+
+ // The label key.
+ string key = 1;
+
+ // The type of data that can be assigned to the label.
+ ValueType value_type = 2;
+
+ // A human-readable description for the label.
+ string description = 3;
+}
diff --git a/Lib/site-packages/google/api/label_pb2.py b/Lib/site-packages/google/api/label_pb2.py
new file mode 100644
index 0000000..6741941
--- /dev/null
+++ b/Lib/site-packages/google/api/label_pb2.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/label.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x16google/api/label.proto\x12\ngoogle.api"\x9c\x01\n\x0fLabelDescriptor\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\nvalue_type\x18\x02 \x01(\x0e\x32%.google.api.LabelDescriptor.ValueType\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t",\n\tValueType\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x42OOL\x10\x01\x12\t\n\x05INT64\x10\x02\x42_\n\x0e\x63om.google.apiB\nLabelProtoP\x01Z5google.golang.org/genproto/googleapis/api/label;label\xf8\x01\x01\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_LABELDESCRIPTOR = DESCRIPTOR.message_types_by_name["LabelDescriptor"]
+_LABELDESCRIPTOR_VALUETYPE = _LABELDESCRIPTOR.enum_types_by_name["ValueType"]
+LabelDescriptor = _reflection.GeneratedProtocolMessageType(
+ "LabelDescriptor",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LABELDESCRIPTOR,
+ "__module__": "google.api.label_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.LabelDescriptor)
+ },
+)
+_sym_db.RegisterMessage(LabelDescriptor)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\nLabelProtoP\001Z5google.golang.org/genproto/googleapis/api/label;label\370\001\001\242\002\004GAPI"
+ _LABELDESCRIPTOR._serialized_start = 39
+ _LABELDESCRIPTOR._serialized_end = 195
+ _LABELDESCRIPTOR_VALUETYPE._serialized_start = 151
+ _LABELDESCRIPTOR_VALUETYPE._serialized_end = 195
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/launch_stage.proto b/Lib/site-packages/google/api/launch_stage.proto
new file mode 100644
index 0000000..9802de7
--- /dev/null
+++ b/Lib/site-packages/google/api/launch_stage.proto
@@ -0,0 +1,72 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+option go_package = "google.golang.org/genproto/googleapis/api;api";
+option java_multiple_files = true;
+option java_outer_classname = "LaunchStageProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// The launch stage as defined by [Google Cloud Platform
+// Launch Stages](https://cloud.google.com/terms/launch-stages).
+enum LaunchStage {
+ // Do not use this default value.
+ LAUNCH_STAGE_UNSPECIFIED = 0;
+
+ // The feature is not yet implemented. Users cannot use it.
+ UNIMPLEMENTED = 6;
+
+ // Prelaunch features are hidden from users and are only visible internally.
+ PRELAUNCH = 7;
+
+ // Early Access features are limited to a closed group of testers. To use
+ // these features, you must sign up in advance and sign a Trusted Tester
+ // agreement (which includes confidentiality provisions). These features may
+ // be unstable, changed in backward-incompatible ways, and are not
+ // guaranteed to be released.
+ EARLY_ACCESS = 1;
+
+ // Alpha is a limited availability test for releases before they are cleared
+ // for widespread use. By Alpha, all significant design issues are resolved
+ // and we are in the process of verifying functionality. Alpha customers
+ // need to apply for access, agree to applicable terms, and have their
+ // projects allowlisted. Alpha releases don't have to be feature complete,
+ // no SLAs are provided, and there are no technical support obligations, but
+ // they will be far enough along that customers can actually use them in
+ // test environments or for limited-use tests -- just like they would in
+ // normal production cases.
+ ALPHA = 2;
+
+ // Beta is the point at which we are ready to open a release for any
+ // customer to use. There are no SLA or technical support obligations in a
+ // Beta release. Products will be complete from a feature perspective, but
+ // may have some open outstanding issues. Beta releases are suitable for
+ // limited production use cases.
+ BETA = 3;
+
+ // GA features are open to all developers and are considered stable and
+ // fully qualified for production use.
+ GA = 4;
+
+ // Deprecated features are scheduled to be shut down and removed. For more
+ // information, see the "Deprecation Policy" section of our [Terms of
+ // Service](https://cloud.google.com/terms/)
+ // and the [Google Cloud Platform Subject to the Deprecation
+ // Policy](https://cloud.google.com/terms/deprecation) documentation.
+ DEPRECATED = 5;
+}
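
As a sketch of how this enum surfaces in Python, the generated `launch_stage_pb2` module (shown next) exposes `LaunchStage` as a proto enum wrapper plus module-level constants:

```python
from google.api import launch_stage_pb2

# Module-level constants mirror the enum numbers from the .proto.
assert launch_stage_pb2.GA == 4
# The wrapper converts between names and numbers.
assert launch_stage_pb2.LaunchStage.Name(launch_stage_pb2.BETA) == "BETA"
assert launch_stage_pb2.LaunchStage.Value("ALPHA") == launch_stage_pb2.ALPHA
```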
diff --git a/Lib/site-packages/google/api/launch_stage_pb2.py b/Lib/site-packages/google/api/launch_stage_pb2.py
new file mode 100644
index 0000000..f4ba6af
--- /dev/null
+++ b/Lib/site-packages/google/api/launch_stage_pb2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/launch_stage.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b"\n\x1dgoogle/api/launch_stage.proto\x12\ngoogle.api*\x8c\x01\n\x0bLaunchStage\x12\x1c\n\x18LAUNCH_STAGE_UNSPECIFIED\x10\x00\x12\x11\n\rUNIMPLEMENTED\x10\x06\x12\r\n\tPRELAUNCH\x10\x07\x12\x10\n\x0c\x45\x41RLY_ACCESS\x10\x01\x12\t\n\x05\x41LPHA\x10\x02\x12\x08\n\x04\x42\x45TA\x10\x03\x12\x06\n\x02GA\x10\x04\x12\x0e\n\nDEPRECATED\x10\x05\x42Z\n\x0e\x63om.google.apiB\x10LaunchStageProtoP\x01Z-google.golang.org/genproto/googleapis/api;api\xa2\x02\x04GAPIb\x06proto3"
+)
+
+_LAUNCHSTAGE = DESCRIPTOR.enum_types_by_name["LaunchStage"]
+LaunchStage = enum_type_wrapper.EnumTypeWrapper(_LAUNCHSTAGE)
+LAUNCH_STAGE_UNSPECIFIED = 0
+UNIMPLEMENTED = 6
+PRELAUNCH = 7
+EARLY_ACCESS = 1
+ALPHA = 2
+BETA = 3
+GA = 4
+DEPRECATED = 5
+
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\020LaunchStageProtoP\001Z-google.golang.org/genproto/googleapis/api;api\242\002\004GAPI"
+ _LAUNCHSTAGE._serialized_start = 46
+ _LAUNCHSTAGE._serialized_end = 186
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/log.proto b/Lib/site-packages/google/api/log.proto
new file mode 100644
index 0000000..416c4f6
--- /dev/null
+++ b/Lib/site-packages/google/api/log.proto
@@ -0,0 +1,54 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/api/label.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig";
+option java_multiple_files = true;
+option java_outer_classname = "LogProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// A description of a log type. Example in YAML format:
+//
+// - name: library.googleapis.com/activity_history
+// description: The history of borrowing and returning library items.
+// display_name: Activity
+// labels:
+// - key: /customer_id
+// description: Identifier of a library customer
+message LogDescriptor {
+ // The name of the log. It must be less than 512 characters long and can
+ // include the following characters: upper- and lower-case alphanumeric
+ // characters [A-Za-z0-9], and punctuation characters including
+ // slash, underscore, hyphen, period [/_-.].
+ string name = 1;
+
+ // The set of labels that are available to describe a specific log entry.
+ // Runtime requests that contain labels not specified here are
+ // considered invalid.
+ repeated LabelDescriptor labels = 2;
+
+ // A human-readable description of this log. This information appears in
+ // the documentation and can contain details.
+ string description = 3;
+
+ // The human-readable name for this log. This information appears on
+ // the user interface and should be concise.
+ string display_name = 4;
+}
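
A minimal sketch mirroring the YAML example above with the generated modules (again assuming `googleapis-common-protos` is installed):

```python
from google.api import label_pb2, log_pb2

log = log_pb2.LogDescriptor(
    name="library.googleapis.com/activity_history",
    description="The history of borrowing and returning library items.",
    display_name="Activity",
    labels=[
        label_pb2.LabelDescriptor(
            key="/customer_id",
            description="Identifier of a library customer",
        )
    ],
)
assert log.labels[0].key == "/customer_id"
```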
diff --git a/Lib/site-packages/google/api/log_pb2.py b/Lib/site-packages/google/api/log_pb2.py
new file mode 100644
index 0000000..775b3df
--- /dev/null
+++ b/Lib/site-packages/google/api/log_pb2.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/log.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import label_pb2 as google_dot_api_dot_label__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x14google/api/log.proto\x12\ngoogle.api\x1a\x16google/api/label.proto"u\n\rLogDescriptor\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x06labels\x18\x02 \x03(\x0b\x32\x1b.google.api.LabelDescriptor\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x04 \x01(\tBj\n\x0e\x63om.google.apiB\x08LogProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_LOGDESCRIPTOR = DESCRIPTOR.message_types_by_name["LogDescriptor"]
+LogDescriptor = _reflection.GeneratedProtocolMessageType(
+ "LogDescriptor",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LOGDESCRIPTOR,
+ "__module__": "google.api.log_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.LogDescriptor)
+ },
+)
+_sym_db.RegisterMessage(LogDescriptor)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\010LogProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\242\002\004GAPI"
+ _LOGDESCRIPTOR._serialized_start = 60
+ _LOGDESCRIPTOR._serialized_end = 177
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/logging.proto b/Lib/site-packages/google/api/logging.proto
new file mode 100644
index 0000000..650786f
--- /dev/null
+++ b/Lib/site-packages/google/api/logging.proto
@@ -0,0 +1,81 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig";
+option java_multiple_files = true;
+option java_outer_classname = "LoggingProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// Logging configuration of the service.
+//
+// The following example shows how to configure logs to be sent to the
+// producer and consumer projects. In the example, the `activity_history`
+// log is sent to both the producer and consumer projects, whereas the
+// `purchase_history` log is only sent to the producer project.
+//
+// monitored_resources:
+// - type: library.googleapis.com/branch
+// labels:
+// - key: /city
+// description: The city where the library branch is located.
+// - key: /name
+// description: The name of the branch.
+// logs:
+// - name: activity_history
+// labels:
+// - key: /customer_id
+// - name: purchase_history
+// logging:
+// producer_destinations:
+// - monitored_resource: library.googleapis.com/branch
+// logs:
+// - activity_history
+// - purchase_history
+// consumer_destinations:
+// - monitored_resource: library.googleapis.com/branch
+// logs:
+// - activity_history
+message Logging {
+ // Configuration of a specific logging destination (the producer project
+ // or the consumer project).
+ message LoggingDestination {
+ // The monitored resource type. The type must be defined in the
+ // [Service.monitored_resources][google.api.Service.monitored_resources]
+ // section.
+ string monitored_resource = 3;
+
+ // Names of the logs to be sent to this destination. Each name must
+ // be defined in the [Service.logs][google.api.Service.logs] section. If the
+ // log name is not a domain scoped name, it will be automatically prefixed
+ // with the service name followed by "/".
+ repeated string logs = 1;
+ }
+
+ // Logging configurations for sending logs to the producer project.
+ // There can be multiple producer destinations, each one must have a
+ // different monitored resource type. A log can be used in at most
+ // one producer destination.
+ repeated LoggingDestination producer_destinations = 1;
+
+ // Logging configurations for sending logs to the consumer project.
+ // There can be multiple consumer destinations, each one must have a
+ // different monitored resource type. A log can be used in at most
+ // one consumer destination.
+ repeated LoggingDestination consumer_destinations = 2;
+}
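
The YAML example above maps onto the message like so (a sketch using the generated `logging_pb2` module):

```python
from google.api import logging_pb2

logging_config = logging_pb2.Logging(
    producer_destinations=[
        logging_pb2.Logging.LoggingDestination(
            monitored_resource="library.googleapis.com/branch",
            logs=["activity_history", "purchase_history"],
        )
    ],
    consumer_destinations=[
        logging_pb2.Logging.LoggingDestination(
            monitored_resource="library.googleapis.com/branch",
            logs=["activity_history"],
        )
    ],
)
```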
diff --git a/Lib/site-packages/google/api/logging_pb2.py b/Lib/site-packages/google/api/logging_pb2.py
new file mode 100644
index 0000000..c9cd00e
--- /dev/null
+++ b/Lib/site-packages/google/api/logging_pb2.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/logging.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x18google/api/logging.proto\x12\ngoogle.api"\xd7\x01\n\x07Logging\x12\x45\n\x15producer_destinations\x18\x01 \x03(\x0b\x32&.google.api.Logging.LoggingDestination\x12\x45\n\x15\x63onsumer_destinations\x18\x02 \x03(\x0b\x32&.google.api.Logging.LoggingDestination\x1a>\n\x12LoggingDestination\x12\x1a\n\x12monitored_resource\x18\x03 \x01(\t\x12\x0c\n\x04logs\x18\x01 \x03(\tBn\n\x0e\x63om.google.apiB\x0cLoggingProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_LOGGING = DESCRIPTOR.message_types_by_name["Logging"]
+_LOGGING_LOGGINGDESTINATION = _LOGGING.nested_types_by_name["LoggingDestination"]
+Logging = _reflection.GeneratedProtocolMessageType(
+ "Logging",
+ (_message.Message,),
+ {
+ "LoggingDestination": _reflection.GeneratedProtocolMessageType(
+ "LoggingDestination",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LOGGING_LOGGINGDESTINATION,
+ "__module__": "google.api.logging_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Logging.LoggingDestination)
+ },
+ ),
+ "DESCRIPTOR": _LOGGING,
+ "__module__": "google.api.logging_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Logging)
+ },
+)
+_sym_db.RegisterMessage(Logging)
+_sym_db.RegisterMessage(Logging.LoggingDestination)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\014LoggingProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\242\002\004GAPI"
+ _LOGGING._serialized_start = 41
+ _LOGGING._serialized_end = 256
+ _LOGGING_LOGGINGDESTINATION._serialized_start = 194
+ _LOGGING_LOGGINGDESTINATION._serialized_end = 256
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/metric.proto b/Lib/site-packages/google/api/metric.proto
new file mode 100644
index 0000000..9bf043c
--- /dev/null
+++ b/Lib/site-packages/google/api/metric.proto
@@ -0,0 +1,268 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/api/label.proto";
+import "google/api/launch_stage.proto";
+import "google/protobuf/duration.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/api/metric;metric";
+option java_multiple_files = true;
+option java_outer_classname = "MetricProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// Defines a metric type and its schema. Once a metric descriptor is created,
+// deleting or altering it stops data collection and makes the metric type's
+// existing data unusable.
+//
+message MetricDescriptor {
+ // The kind of measurement. It describes how the data is reported.
+ // For information on setting the start time and end time based on
+ // the MetricKind, see [TimeInterval][google.monitoring.v3.TimeInterval].
+ enum MetricKind {
+ // Do not use this default value.
+ METRIC_KIND_UNSPECIFIED = 0;
+
+ // An instantaneous measurement of a value.
+ GAUGE = 1;
+
+ // The change in a value during a time interval.
+ DELTA = 2;
+
+ // A value accumulated over a time interval. Cumulative
+ // measurements in a time series should have the same start time
+ // and increasing end times, until an event resets the cumulative
+ // value to zero and sets a new start time for the following
+ // points.
+ CUMULATIVE = 3;
+ }
+
+ // The value type of a metric.
+ enum ValueType {
+ // Do not use this default value.
+ VALUE_TYPE_UNSPECIFIED = 0;
+
+ // The value is a boolean.
+ // This value type can be used only if the metric kind is `GAUGE`.
+ BOOL = 1;
+
+ // The value is a signed 64-bit integer.
+ INT64 = 2;
+
+ // The value is a double precision floating point number.
+ DOUBLE = 3;
+
+ // The value is a text string.
+ // This value type can be used only if the metric kind is `GAUGE`.
+ STRING = 4;
+
+ // The value is a [`Distribution`][google.api.Distribution].
+ DISTRIBUTION = 5;
+
+ // The value is money.
+ MONEY = 6;
+ }
+
+ // Additional annotations that can be used to guide the usage of a metric.
+ message MetricDescriptorMetadata {
+ // Deprecated. Must use the
+ // [MetricDescriptor.launch_stage][google.api.MetricDescriptor.launch_stage]
+ // instead.
+ LaunchStage launch_stage = 1 [deprecated = true];
+
+ // The sampling period of metric data points. For metrics which are written
+ // periodically, consecutive data points are stored at this time interval,
+ // excluding data loss due to errors. Metrics with a higher granularity have
+ // a smaller sampling period.
+ google.protobuf.Duration sample_period = 2;
+
+ // The delay of data points caused by ingestion. Data points older than this
+ // age are guaranteed to be ingested and available to be read, excluding
+ // data loss due to errors.
+ google.protobuf.Duration ingest_delay = 3;
+ }
+
+ // The resource name of the metric descriptor.
+ string name = 1;
+
+ // The metric type, including its DNS name prefix. The type is not
+ // URL-encoded. All user-defined metric types have the DNS name
+ // `custom.googleapis.com` or `external.googleapis.com`. Metric types should
+ // use a natural hierarchical grouping. For example:
+ //
+ // "custom.googleapis.com/invoice/paid/amount"
+ // "external.googleapis.com/prometheus/up"
+ // "appengine.googleapis.com/http/server/response_latencies"
+ string type = 8;
+
+ // The set of labels that can be used to describe a specific
+ // instance of this metric type. For example, the
+ // `appengine.googleapis.com/http/server/response_latencies` metric
+ // type has a label for the HTTP response code, `response_code`, so
+ // you can look at latencies for successful responses or just
+ // for responses that failed.
+ repeated LabelDescriptor labels = 2;
+
+ // Whether the metric records instantaneous values, changes to a value, etc.
+ // Some combinations of `metric_kind` and `value_type` might not be supported.
+ MetricKind metric_kind = 3;
+
+ // Whether the measurement is an integer, a floating-point number, etc.
+ // Some combinations of `metric_kind` and `value_type` might not be supported.
+ ValueType value_type = 4;
+
+ // The units in which the metric value is reported. It is only applicable
+ // if the `value_type` is `INT64`, `DOUBLE`, or `DISTRIBUTION`. The `unit`
+ // defines the representation of the stored metric values.
+ //
+ // Different systems might scale the values to be more easily displayed (so a
+ // value of `0.02kBy` _might_ be displayed as `20By`, and a value of
+ // `3523kBy` _might_ be displayed as `3.5MBy`). However, if the `unit` is
+ // `kBy`, then the value of the metric is always in thousands of bytes, no
+ // matter how it might be displayed.
+ //
+ // If you want a custom metric to record the exact number of CPU-seconds used
+ // by a job, you can create an `INT64 CUMULATIVE` metric whose `unit` is
+ // `s{CPU}` (or equivalently `1s{CPU}` or just `s`). If the job uses 12,005
+ // CPU-seconds, then the value is written as `12005`.
+ //
+ // Alternatively, if you want a custom metric to record data in a more
+ // granular way, you can create a `DOUBLE CUMULATIVE` metric whose `unit` is
+ // `ks{CPU}`, and then write the value `12.005` (which is `12005/1000`),
+ // or use `Kis{CPU}` and write `11.723` (which is `12005/1024`).
+ //
+ // The supported units are a subset of [The Unified Code for Units of
+ // Measure](https://unitsofmeasure.org/ucum.html) standard:
+ //
+ // **Basic units (UNIT)**
+ //
+ // * `bit` bit
+ // * `By` byte
+ // * `s` second
+ // * `min` minute
+ // * `h` hour
+ // * `d` day
+ // * `1` dimensionless
+ //
+ // **Prefixes (PREFIX)**
+ //
+ // * `k` kilo (10^3)
+ // * `M` mega (10^6)
+ // * `G` giga (10^9)
+ // * `T` tera (10^12)
+ // * `P` peta (10^15)
+ // * `E` exa (10^18)
+ // * `Z` zetta (10^21)
+ // * `Y` yotta (10^24)
+ //
+ // * `m` milli (10^-3)
+ // * `u` micro (10^-6)
+ // * `n` nano (10^-9)
+ // * `p` pico (10^-12)
+ // * `f` femto (10^-15)
+ // * `a` atto (10^-18)
+ // * `z` zepto (10^-21)
+ // * `y` yocto (10^-24)
+ //
+ // * `Ki` kibi (2^10)
+ // * `Mi` mebi (2^20)
+ // * `Gi` gibi (2^30)
+ // * `Ti` tebi (2^40)
+ // * `Pi` pebi (2^50)
+ //
+ // **Grammar**
+ //
+ // The grammar also includes these connectors:
+ //
+ // * `/` division or ratio (as an infix operator). For examples,
+ // `kBy/{email}` or `MiBy/10ms` (although you should almost never
+ // have `/s` in a metric `unit`; rates should always be computed at
+ // query time from the underlying cumulative or delta value).
+ // * `.` multiplication or composition (as an infix operator). For
+ // examples, `GBy.d` or `k{watt}.h`.
+ //
+ // The grammar for a unit is as follows:
+ //
+ // Expression = Component { "." Component } { "/" Component } ;
+ //
+ // Component = ( [ PREFIX ] UNIT | "%" ) [ Annotation ]
+ // | Annotation
+ // | "1"
+ // ;
+ //
+ // Annotation = "{" NAME "}" ;
+ //
+ // Notes:
+ //
+ // * `Annotation` is just a comment if it follows a `UNIT`. If the annotation
+ // is used alone, then the unit is equivalent to `1`. For examples,
+ // `{request}/s == 1/s`, `By{transmitted}/s == By/s`.
+ // * `NAME` is a sequence of non-blank printable ASCII characters not
+ // containing `{` or `}`.
+ // * `1` represents a unitary [dimensionless
+ // unit](https://en.wikipedia.org/wiki/Dimensionless_quantity) of 1, such
+ // as in `1/s`. It is typically used when none of the basic units are
+ // appropriate. For example, "new users per day" can be represented as
+ // `1/d` or `{new-users}/d` (and a metric value `5` would mean "5 new
+ // users"). Alternatively, "thousands of page views per day" would be
+ // represented as `1000/d` or `k1/d` or `k{page_views}/d` (and a metric
+ // value of `5.3` would mean "5300 page views per day").
+ // * `%` represents dimensionless value of 1/100, and annotates values giving
+ // a percentage (so the metric values are typically in the range of 0..100,
+ // and a metric value `3` means "3 percent").
+ // * `10^2.%` indicates a metric contains a ratio, typically in the range
+ // 0..1, that will be multiplied by 100 and displayed as a percentage
+ // (so a metric value `0.03` means "3 percent").
+ string unit = 5;
+
+ // A detailed description of the metric, which can be used in documentation.
+ string description = 6;
+
+ // A concise name for the metric, which can be displayed in user interfaces.
+ // Use sentence case without an ending period, for example "Request count".
+ // This field is optional but it is recommended to be set for any metrics
+ // associated with user-visible concepts, such as Quota.
+ string display_name = 7;
+
+ // Optional. Metadata which can be used to guide usage of the metric.
+ MetricDescriptorMetadata metadata = 10;
+
+ // Optional. The launch stage of the metric definition.
+ LaunchStage launch_stage = 12;
+
+ // Read-only. If present, then a [time
+ // series][google.monitoring.v3.TimeSeries], which is identified partially by
+ // a metric type and a
+ // [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor], that
+ // is associated with this metric type can only be associated with one of the
+ // monitored resource types listed here.
+ repeated string monitored_resource_types = 13;
+}
+
+// A specific metric, identified by specifying values for all of the
+// labels of a [`MetricDescriptor`][google.api.MetricDescriptor].
+message Metric {
+ // An existing metric type, see
+ // [google.api.MetricDescriptor][google.api.MetricDescriptor]. For example,
+ // `custom.googleapis.com/invoice/paid/amount`.
+ string type = 3;
+
+ // The set of label values that uniquely identify this metric. All
+ // labels listed in the `MetricDescriptor` must be assigned values.
+ map<string, string> labels = 2;
+}
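
A sketch of the CPU-seconds example from the `unit` documentation above, using the generated `metric_pb2` module (the metric name is hypothetical):

```python
from google.api import label_pb2, metric_pb2

descriptor = metric_pb2.MetricDescriptor(
    type="custom.googleapis.com/job/cpu_usage",    # hypothetical custom metric
    metric_kind=metric_pb2.MetricDescriptor.MetricKind.CUMULATIVE,
    value_type=metric_pb2.MetricDescriptor.ValueType.INT64,
    unit="s{CPU}",                                 # exact CPU-seconds, per the grammar above
    display_name="Job CPU usage",
    labels=[label_pb2.LabelDescriptor(key="job_id")],
)

# A concrete stream is identified by a Metric: the type plus a value for
# every label declared in the descriptor (map fields accept a plain dict).
point = metric_pb2.Metric(type=descriptor.type, labels={"job_id": "job-42"})
```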
diff --git a/Lib/site-packages/google/api/metric_pb2.py b/Lib/site-packages/google/api/metric_pb2.py
new file mode 100644
index 0000000..c0b62ee
--- /dev/null
+++ b/Lib/site-packages/google/api/metric_pb2.py
@@ -0,0 +1,115 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/metric.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import label_pb2 as google_dot_api_dot_label__pb2
+from google.api import launch_stage_pb2 as google_dot_api_dot_launch__stage__pb2
+from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x17google/api/metric.proto\x12\ngoogle.api\x1a\x16google/api/label.proto\x1a\x1dgoogle/api/launch_stage.proto\x1a\x1egoogle/protobuf/duration.proto"\x9f\x06\n\x10MetricDescriptor\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x08 \x01(\t\x12+\n\x06labels\x18\x02 \x03(\x0b\x32\x1b.google.api.LabelDescriptor\x12<\n\x0bmetric_kind\x18\x03 \x01(\x0e\x32\'.google.api.MetricDescriptor.MetricKind\x12:\n\nvalue_type\x18\x04 \x01(\x0e\x32&.google.api.MetricDescriptor.ValueType\x12\x0c\n\x04unit\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x07 \x01(\t\x12G\n\x08metadata\x18\n \x01(\x0b\x32\x35.google.api.MetricDescriptor.MetricDescriptorMetadata\x12-\n\x0claunch_stage\x18\x0c \x01(\x0e\x32\x17.google.api.LaunchStage\x12 \n\x18monitored_resource_types\x18\r \x03(\t\x1a\xb0\x01\n\x18MetricDescriptorMetadata\x12\x31\n\x0claunch_stage\x18\x01 \x01(\x0e\x32\x17.google.api.LaunchStageB\x02\x18\x01\x12\x30\n\rsample_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12/\n\x0cingest_delay\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration"O\n\nMetricKind\x12\x1b\n\x17METRIC_KIND_UNSPECIFIED\x10\x00\x12\t\n\x05GAUGE\x10\x01\x12\t\n\x05\x44\x45LTA\x10\x02\x12\x0e\n\nCUMULATIVE\x10\x03"q\n\tValueType\x12\x1a\n\x16VALUE_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x42OOL\x10\x01\x12\t\n\x05INT64\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\n\n\x06STRING\x10\x04\x12\x10\n\x0c\x44ISTRIBUTION\x10\x05\x12\t\n\x05MONEY\x10\x06"u\n\x06Metric\x12\x0c\n\x04type\x18\x03 \x01(\t\x12.\n\x06labels\x18\x02 \x03(\x0b\x32\x1e.google.api.Metric.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42_\n\x0e\x63om.google.apiB\x0bMetricProtoP\x01Z7google.golang.org/genproto/googleapis/api/metric;metric\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_METRICDESCRIPTOR = DESCRIPTOR.message_types_by_name["MetricDescriptor"]
+_METRICDESCRIPTOR_METRICDESCRIPTORMETADATA = _METRICDESCRIPTOR.nested_types_by_name[
+ "MetricDescriptorMetadata"
+]
+_METRIC = DESCRIPTOR.message_types_by_name["Metric"]
+_METRIC_LABELSENTRY = _METRIC.nested_types_by_name["LabelsEntry"]
+_METRICDESCRIPTOR_METRICKIND = _METRICDESCRIPTOR.enum_types_by_name["MetricKind"]
+_METRICDESCRIPTOR_VALUETYPE = _METRICDESCRIPTOR.enum_types_by_name["ValueType"]
+MetricDescriptor = _reflection.GeneratedProtocolMessageType(
+ "MetricDescriptor",
+ (_message.Message,),
+ {
+ "MetricDescriptorMetadata": _reflection.GeneratedProtocolMessageType(
+ "MetricDescriptorMetadata",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _METRICDESCRIPTOR_METRICDESCRIPTORMETADATA,
+ "__module__": "google.api.metric_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.MetricDescriptor.MetricDescriptorMetadata)
+ },
+ ),
+ "DESCRIPTOR": _METRICDESCRIPTOR,
+ "__module__": "google.api.metric_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.MetricDescriptor)
+ },
+)
+_sym_db.RegisterMessage(MetricDescriptor)
+_sym_db.RegisterMessage(MetricDescriptor.MetricDescriptorMetadata)
+
+Metric = _reflection.GeneratedProtocolMessageType(
+ "Metric",
+ (_message.Message,),
+ {
+ "LabelsEntry": _reflection.GeneratedProtocolMessageType(
+ "LabelsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _METRIC_LABELSENTRY,
+ "__module__": "google.api.metric_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Metric.LabelsEntry)
+ },
+ ),
+ "DESCRIPTOR": _METRIC,
+ "__module__": "google.api.metric_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Metric)
+ },
+)
+_sym_db.RegisterMessage(Metric)
+_sym_db.RegisterMessage(Metric.LabelsEntry)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\013MetricProtoP\001Z7google.golang.org/genproto/googleapis/api/metric;metric\242\002\004GAPI"
+ _METRICDESCRIPTOR_METRICDESCRIPTORMETADATA.fields_by_name[
+ "launch_stage"
+ ]._options = None
+ _METRICDESCRIPTOR_METRICDESCRIPTORMETADATA.fields_by_name[
+ "launch_stage"
+ ]._serialized_options = b"\030\001"
+ _METRIC_LABELSENTRY._options = None
+ _METRIC_LABELSENTRY._serialized_options = b"8\001"
+ _METRICDESCRIPTOR._serialized_start = 127
+ _METRICDESCRIPTOR._serialized_end = 926
+ _METRICDESCRIPTOR_METRICDESCRIPTORMETADATA._serialized_start = 554
+ _METRICDESCRIPTOR_METRICDESCRIPTORMETADATA._serialized_end = 730
+ _METRICDESCRIPTOR_METRICKIND._serialized_start = 732
+ _METRICDESCRIPTOR_METRICKIND._serialized_end = 811
+ _METRICDESCRIPTOR_VALUETYPE._serialized_start = 813
+ _METRICDESCRIPTOR_VALUETYPE._serialized_end = 926
+ _METRIC._serialized_start = 928
+ _METRIC._serialized_end = 1045
+ _METRIC_LABELSENTRY._serialized_start = 1000
+ _METRIC_LABELSENTRY._serialized_end = 1045
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/monitored_resource.proto b/Lib/site-packages/google/api/monitored_resource.proto
new file mode 100644
index 0000000..c6f9759
--- /dev/null
+++ b/Lib/site-packages/google/api/monitored_resource.proto
@@ -0,0 +1,130 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/api/label.proto";
+import "google/api/launch_stage.proto";
+import "google/protobuf/struct.proto";
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/api/monitoredres;monitoredres";
+option java_multiple_files = true;
+option java_outer_classname = "MonitoredResourceProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// An object that describes the schema of a
+// [MonitoredResource][google.api.MonitoredResource] object using a type name
+// and a set of labels. For example, the monitored resource descriptor for
+// Google Compute Engine VM instances has a type of
+// `"gce_instance"` and specifies the use of the labels `"instance_id"` and
+// `"zone"` to identify particular VM instances.
+//
+// Different APIs can support different monitored resource types. APIs generally
+// provide a `list` method that returns the monitored resource descriptors used
+// by the API.
+//
+message MonitoredResourceDescriptor {
+ // Optional. The resource name of the monitored resource descriptor:
+ // `"projects/{project_id}/monitoredResourceDescriptors/{type}"` where
+ // {type} is the value of the `type` field in this object and
+ // {project_id} is a project ID that provides API-specific context for
+ // accessing the type. APIs that do not use project information can use the
+ // resource name format `"monitoredResourceDescriptors/{type}"`.
+ string name = 5;
+
+ // Required. The monitored resource type. For example, the type
+ // `"cloudsql_database"` represents databases in Google Cloud SQL.
+ // For a list of types, see [Monitoring resource
+ // types](https://cloud.google.com/monitoring/api/resources)
+ // and [Logging resource
+ // types](https://cloud.google.com/logging/docs/api/v2/resource-list).
+ string type = 1;
+
+ // Optional. A concise name for the monitored resource type that might be
+ // displayed in user interfaces. It should be a Title Cased Noun Phrase,
+ // without any article or other determiners. For example,
+ // `"Google Cloud SQL Database"`.
+ string display_name = 2;
+
+ // Optional. A detailed description of the monitored resource type that might
+ // be used in documentation.
+ string description = 3;
+
+ // Required. A set of labels used to describe instances of this monitored
+ // resource type. For example, an individual Google Cloud SQL database is
+ // identified by values for the labels `"database_id"` and `"zone"`.
+ repeated LabelDescriptor labels = 4;
+
+ // Optional. The launch stage of the monitored resource definition.
+ LaunchStage launch_stage = 7;
+}
+
+// An object representing a resource that can be used for monitoring, logging,
+// billing, or other purposes. Examples include virtual machine instances,
+// databases, and storage devices such as disks. The `type` field identifies a
+// [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] object
+// that describes the resource's schema. Information in the `labels` field
+// identifies the actual resource and its attributes according to the schema.
+// For example, a particular Compute Engine VM instance could be represented by
+// the following object, because the
+// [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] for
+// `"gce_instance"` has labels
+// `"project_id"`, `"instance_id"` and `"zone"`:
+//
+// { "type": "gce_instance",
+// "labels": { "project_id": "my-project",
+// "instance_id": "12345678901234",
+// "zone": "us-central1-a" }}
+message MonitoredResource {
+ // Required. The monitored resource type. This field must match
+ // the `type` field of a
+ // [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor]
+ // object. For example, the type of a Compute Engine VM instance is
+ // `gce_instance`. Some descriptors include the service name in the type; for
+ // example, the type of a Datastream stream is
+ // `datastream.googleapis.com/Stream`.
+ string type = 1;
+
+ // Required. Values for all of the labels listed in the associated monitored
+ // resource descriptor. For example, Compute Engine VM instances use the
+ // labels `"project_id"`, `"instance_id"`, and `"zone"`.
+ map<string, string> labels = 2;
+}
+
+// Auxiliary metadata for a [MonitoredResource][google.api.MonitoredResource]
+// object. [MonitoredResource][google.api.MonitoredResource] objects contain the
+// minimum set of information to uniquely identify a monitored resource
+// instance. There is some other useful auxiliary metadata. Monitoring and
+// Logging use an ingestion pipeline to extract metadata for cloud resources of
+// all types, and store the metadata in this message.
+message MonitoredResourceMetadata {
+ // Output only. Values for predefined system metadata labels.
+ // System labels are a kind of metadata extracted by Google, including
+ // "machine_image", "vpc", "subnet_id",
+ // "security_group", "name", etc.
+ // System label values can be only strings, Boolean values, or a list of
+ // strings. For example:
+ //
+ // { "name": "my-test-instance",
+ // "security_group": ["a", "b", "c"],
+ // "spot_instance": false }
+ google.protobuf.Struct system_labels = 1;
+
+ // Output only. A map of user-defined metadata labels.
+ map<string, string> user_labels = 2;
+}
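
The `gce_instance` example above, as a sketch with the generated module (the `Struct`-typed `system_labels` field supports dict-style `update`):

```python
from google.api import monitored_resource_pb2

resource = monitored_resource_pb2.MonitoredResource(
    type="gce_instance",
    labels={
        "project_id": "my-project",
        "instance_id": "12345678901234",
        "zone": "us-central1-a",
    },
)

metadata = monitored_resource_pb2.MonitoredResourceMetadata()
metadata.system_labels.update(
    {"name": "my-test-instance", "security_group": ["a", "b", "c"], "spot_instance": False}
)
```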
diff --git a/Lib/site-packages/google/api/monitored_resource_pb2.py b/Lib/site-packages/google/api/monitored_resource_pb2.py
new file mode 100644
index 0000000..07be8aa
--- /dev/null
+++ b/Lib/site-packages/google/api/monitored_resource_pb2.py
@@ -0,0 +1,123 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/monitored_resource.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import label_pb2 as google_dot_api_dot_label__pb2
+from google.api import launch_stage_pb2 as google_dot_api_dot_launch__stage__pb2
+from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n#google/api/monitored_resource.proto\x12\ngoogle.api\x1a\x16google/api/label.proto\x1a\x1dgoogle/api/launch_stage.proto\x1a\x1cgoogle/protobuf/struct.proto"\xc0\x01\n\x1bMonitoredResourceDescriptor\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12+\n\x06labels\x18\x04 \x03(\x0b\x32\x1b.google.api.LabelDescriptor\x12-\n\x0claunch_stage\x18\x07 \x01(\x0e\x32\x17.google.api.LaunchStage"\x8b\x01\n\x11MonitoredResource\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x39\n\x06labels\x18\x02 \x03(\x0b\x32).google.api.MonitoredResource.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xca\x01\n\x19MonitoredResourceMetadata\x12.\n\rsystem_labels\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12J\n\x0buser_labels\x18\x02 \x03(\x0b\x32\x35.google.api.MonitoredResourceMetadata.UserLabelsEntry\x1a\x31\n\x0fUserLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42y\n\x0e\x63om.google.apiB\x16MonitoredResourceProtoP\x01ZCgoogle.golang.org/genproto/googleapis/api/monitoredres;monitoredres\xf8\x01\x01\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_MONITOREDRESOURCEDESCRIPTOR = DESCRIPTOR.message_types_by_name[
+ "MonitoredResourceDescriptor"
+]
+_MONITOREDRESOURCE = DESCRIPTOR.message_types_by_name["MonitoredResource"]
+_MONITOREDRESOURCE_LABELSENTRY = _MONITOREDRESOURCE.nested_types_by_name["LabelsEntry"]
+_MONITOREDRESOURCEMETADATA = DESCRIPTOR.message_types_by_name[
+ "MonitoredResourceMetadata"
+]
+_MONITOREDRESOURCEMETADATA_USERLABELSENTRY = (
+ _MONITOREDRESOURCEMETADATA.nested_types_by_name["UserLabelsEntry"]
+)
+MonitoredResourceDescriptor = _reflection.GeneratedProtocolMessageType(
+ "MonitoredResourceDescriptor",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _MONITOREDRESOURCEDESCRIPTOR,
+ "__module__": "google.api.monitored_resource_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.MonitoredResourceDescriptor)
+ },
+)
+_sym_db.RegisterMessage(MonitoredResourceDescriptor)
+
+MonitoredResource = _reflection.GeneratedProtocolMessageType(
+ "MonitoredResource",
+ (_message.Message,),
+ {
+ "LabelsEntry": _reflection.GeneratedProtocolMessageType(
+ "LabelsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _MONITOREDRESOURCE_LABELSENTRY,
+ "__module__": "google.api.monitored_resource_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.MonitoredResource.LabelsEntry)
+ },
+ ),
+ "DESCRIPTOR": _MONITOREDRESOURCE,
+ "__module__": "google.api.monitored_resource_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.MonitoredResource)
+ },
+)
+_sym_db.RegisterMessage(MonitoredResource)
+_sym_db.RegisterMessage(MonitoredResource.LabelsEntry)
+
+MonitoredResourceMetadata = _reflection.GeneratedProtocolMessageType(
+ "MonitoredResourceMetadata",
+ (_message.Message,),
+ {
+ "UserLabelsEntry": _reflection.GeneratedProtocolMessageType(
+ "UserLabelsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _MONITOREDRESOURCEMETADATA_USERLABELSENTRY,
+ "__module__": "google.api.monitored_resource_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.MonitoredResourceMetadata.UserLabelsEntry)
+ },
+ ),
+ "DESCRIPTOR": _MONITOREDRESOURCEMETADATA,
+ "__module__": "google.api.monitored_resource_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.MonitoredResourceMetadata)
+ },
+)
+_sym_db.RegisterMessage(MonitoredResourceMetadata)
+_sym_db.RegisterMessage(MonitoredResourceMetadata.UserLabelsEntry)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\026MonitoredResourceProtoP\001ZCgoogle.golang.org/genproto/googleapis/api/monitoredres;monitoredres\370\001\001\242\002\004GAPI"
+ _MONITOREDRESOURCE_LABELSENTRY._options = None
+ _MONITOREDRESOURCE_LABELSENTRY._serialized_options = b"8\001"
+ _MONITOREDRESOURCEMETADATA_USERLABELSENTRY._options = None
+ _MONITOREDRESOURCEMETADATA_USERLABELSENTRY._serialized_options = b"8\001"
+ _MONITOREDRESOURCEDESCRIPTOR._serialized_start = 137
+ _MONITOREDRESOURCEDESCRIPTOR._serialized_end = 329
+ _MONITOREDRESOURCE._serialized_start = 332
+ _MONITOREDRESOURCE._serialized_end = 471
+ _MONITOREDRESOURCE_LABELSENTRY._serialized_start = 426
+ _MONITOREDRESOURCE_LABELSENTRY._serialized_end = 471
+ _MONITOREDRESOURCEMETADATA._serialized_start = 474
+ _MONITOREDRESOURCEMETADATA._serialized_end = 676
+ _MONITOREDRESOURCEMETADATA_USERLABELSENTRY._serialized_start = 627
+ _MONITOREDRESOURCEMETADATA_USERLABELSENTRY._serialized_end = 676
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/monitoring.proto b/Lib/site-packages/google/api/monitoring.proto
new file mode 100644
index 0000000..753703e
--- /dev/null
+++ b/Lib/site-packages/google/api/monitoring.proto
@@ -0,0 +1,107 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig";
+option java_multiple_files = true;
+option java_outer_classname = "MonitoringProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// Monitoring configuration of the service.
+//
+// The example below shows how to configure monitored resources and metrics
+// for monitoring. In the example, a monitored resource and two metrics are
+// defined. The `library.googleapis.com/book/returned_count` metric is sent
+// to both producer and consumer projects, whereas the
+// `library.googleapis.com/book/num_overdue` metric is only sent to the
+// consumer project.
+//
+// monitored_resources:
+// - type: library.googleapis.com/Branch
+// display_name: "Library Branch"
+// description: "A branch of a library."
+// launch_stage: GA
+// labels:
+// - key: resource_container
+// description: "The Cloud container (i.e. project id) for the Branch."
+// - key: location
+// description: "The location of the library branch."
+// - key: branch_id
+// description: "The id of the branch."
+// metrics:
+// - name: library.googleapis.com/book/returned_count
+// display_name: "Books Returned"
+// description: "The count of books that have been returned."
+// launch_stage: GA
+// metric_kind: DELTA
+// value_type: INT64
+// unit: "1"
+// labels:
+// - key: customer_id
+// description: "The id of the customer."
+// - name: library.googleapis.com/book/num_overdue
+// display_name: "Books Overdue"
+// description: "The current number of overdue books."
+// launch_stage: GA
+// metric_kind: GAUGE
+// value_type: INT64
+// unit: "1"
+// labels:
+// - key: customer_id
+// description: "The id of the customer."
+// monitoring:
+// producer_destinations:
+// - monitored_resource: library.googleapis.com/Branch
+// metrics:
+// - library.googleapis.com/book/returned_count
+// consumer_destinations:
+// - monitored_resource: library.googleapis.com/Branch
+// metrics:
+// - library.googleapis.com/book/returned_count
+// - library.googleapis.com/book/num_overdue
+message Monitoring {
+ // Configuration of a specific monitoring destination (the producer project
+ // or the consumer project).
+ message MonitoringDestination {
+ // The monitored resource type. The type must be defined in
+ // [Service.monitored_resources][google.api.Service.monitored_resources]
+ // section.
+ string monitored_resource = 1;
+
+ // Types of the metrics to report to this monitoring destination.
+ // Each type must be defined in
+ // [Service.metrics][google.api.Service.metrics] section.
+ repeated string metrics = 2;
+ }
+
+ // Monitoring configurations for sending metrics to the producer project.
+ // There can be multiple producer destinations. A monitored resource type may
+ // appear in multiple monitoring destinations if different aggregations are
+ // needed for different sets of metrics associated with that monitored
+ // resource type. A monitored resource and metric pair may only be used once
+ // in the Monitoring configuration.
+ repeated MonitoringDestination producer_destinations = 1;
+
+ // Monitoring configurations for sending metrics to the consumer project.
+ // There can be multiple consumer destinations. A monitored resource type may
+ // appear in multiple monitoring destinations if different aggregations are
+ // needed for different sets of metrics associated with that monitored
+ // resource type. A monitored resource and metric pair may only be used once
+ // in the Monitoring configuration.
+ repeated MonitoringDestination consumer_destinations = 2;
+}
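
The monitoring example above builds the same way as the `Logging` message earlier (a sketch):

```python
from google.api import monitoring_pb2

monitoring_config = monitoring_pb2.Monitoring(
    producer_destinations=[
        monitoring_pb2.Monitoring.MonitoringDestination(
            monitored_resource="library.googleapis.com/Branch",
            metrics=["library.googleapis.com/book/returned_count"],
        )
    ],
    consumer_destinations=[
        monitoring_pb2.Monitoring.MonitoringDestination(
            monitored_resource="library.googleapis.com/Branch",
            metrics=[
                "library.googleapis.com/book/returned_count",
                "library.googleapis.com/book/num_overdue",
            ],
        )
    ],
)
```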
diff --git a/Lib/site-packages/google/api/monitoring_pb2.py b/Lib/site-packages/google/api/monitoring_pb2.py
new file mode 100644
index 0000000..c83a64c
--- /dev/null
+++ b/Lib/site-packages/google/api/monitoring_pb2.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/monitoring.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x1bgoogle/api/monitoring.proto\x12\ngoogle.api"\xec\x01\n\nMonitoring\x12K\n\x15producer_destinations\x18\x01 \x03(\x0b\x32,.google.api.Monitoring.MonitoringDestination\x12K\n\x15\x63onsumer_destinations\x18\x02 \x03(\x0b\x32,.google.api.Monitoring.MonitoringDestination\x1a\x44\n\x15MonitoringDestination\x12\x1a\n\x12monitored_resource\x18\x01 \x01(\t\x12\x0f\n\x07metrics\x18\x02 \x03(\tBq\n\x0e\x63om.google.apiB\x0fMonitoringProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_MONITORING = DESCRIPTOR.message_types_by_name["Monitoring"]
+_MONITORING_MONITORINGDESTINATION = _MONITORING.nested_types_by_name[
+ "MonitoringDestination"
+]
+Monitoring = _reflection.GeneratedProtocolMessageType(
+ "Monitoring",
+ (_message.Message,),
+ {
+ "MonitoringDestination": _reflection.GeneratedProtocolMessageType(
+ "MonitoringDestination",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _MONITORING_MONITORINGDESTINATION,
+ "__module__": "google.api.monitoring_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Monitoring.MonitoringDestination)
+ },
+ ),
+ "DESCRIPTOR": _MONITORING,
+ "__module__": "google.api.monitoring_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Monitoring)
+ },
+)
+_sym_db.RegisterMessage(Monitoring)
+_sym_db.RegisterMessage(Monitoring.MonitoringDestination)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\017MonitoringProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\242\002\004GAPI"
+ _MONITORING._serialized_start = 44
+ _MONITORING._serialized_end = 280
+ _MONITORING_MONITORINGDESTINATION._serialized_start = 212
+ _MONITORING_MONITORINGDESTINATION._serialized_end = 280
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/policy.proto b/Lib/site-packages/google/api/policy.proto
new file mode 100644
index 0000000..dd202bc
--- /dev/null
+++ b/Lib/site-packages/google/api/policy.proto
@@ -0,0 +1,85 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/protobuf/descriptor.proto";
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig";
+option java_multiple_files = true;
+option java_outer_classname = "PolicyProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// Provides `google.api.field_policy` annotation at proto fields.
+extend google.protobuf.FieldOptions {
+ // See [FieldPolicy][].
+ FieldPolicy field_policy = 158361448;
+}
+
+// Provides `google.api.method_policy` annotation at proto methods.
+extend google.protobuf.MethodOptions {
+ // See [MethodPolicy][].
+ MethodPolicy method_policy = 161893301;
+}
+
+// Google API Policy Annotation
+//
+// This message defines a simple API policy annotation that can be used to
+// annotate API request and response message fields with applicable policies.
+// One field may have multiple applicable policies that must all be satisfied
+// before a request can be processed. This policy annotation is used to
+// generate the overall policy that will be used for automatic runtime
+// policy enforcement and documentation generation.
+message FieldPolicy {
+ // Selects one or more request or response message fields to apply this
+ // `FieldPolicy`.
+ //
+ // When a `FieldPolicy` is used in proto annotation, the selector must
+  // be left empty. The service config generator will automatically fill
+ // the correct value.
+ //
+ // When a `FieldPolicy` is used in service config, the selector must be a
+ // comma-separated string with valid request or response field paths,
+ // such as "foo.bar" or "foo.bar,foo.baz".
+ string selector = 1;
+
+ // Specifies the required permission(s) for the resource referred to by the
+  // field. It requires that the field contain a valid resource reference and
+  // that the request pass the permission checks to proceed. For example,
+ // "resourcemanager.projects.get".
+ string resource_permission = 2;
+
+ // Specifies the resource type for the resource referred to by the field.
+ string resource_type = 3;
+}
+
+// Defines policies applying to an RPC method.
+message MethodPolicy {
+ // Selects a method to which these policies should be enforced, for example,
+ // "google.pubsub.v1.Subscriber.CreateSubscription".
+ //
+ // Refer to [selector][google.api.DocumentationRule.selector] for syntax
+ // details.
+ //
+ // NOTE: This field must not be set in the proto annotation. It will be
+  // automatically filled by the service config compiler.
+ string selector = 9;
+
+ // Policies that are applicable to the request message.
+ repeated FieldPolicy request_policies = 2;
+}
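Since `field_policy` and `method_policy` are extensions on descriptor options, consumers typically read them back off a compiled descriptor. A hedged sketch using the generated `policy_pb2` module (added below in this diff); `field_policies` is a hypothetical helper, and `message_descriptor` stands for any `Descriptor` whose fields carry the annotation:

```python
# Sketch under the assumption that the inspected proto was compiled with
# (google.api.field_policy) options on some of its fields.
from google.api import policy_pb2

def field_policies(message_descriptor):
    """Yield (field name, FieldPolicy) for every annotated field."""
    for field in message_descriptor.fields:
        opts = field.GetOptions()
        if opts.HasExtension(policy_pb2.field_policy):
            yield field.name, opts.Extensions[policy_pb2.field_policy]
```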
diff --git a/Lib/site-packages/google/api/policy_pb2.py b/Lib/site-packages/google/api/policy_pb2.py
new file mode 100644
index 0000000..d773510
--- /dev/null
+++ b/Lib/site-packages/google/api/policy_pb2.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/policy.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x17google/api/policy.proto\x12\ngoogle.api\x1a google/protobuf/descriptor.proto"S\n\x0b\x46ieldPolicy\x12\x10\n\x08selector\x18\x01 \x01(\t\x12\x1b\n\x13resource_permission\x18\x02 \x01(\t\x12\x15\n\rresource_type\x18\x03 \x01(\t"S\n\x0cMethodPolicy\x12\x10\n\x08selector\x18\t \x01(\t\x12\x31\n\x10request_policies\x18\x02 \x03(\x0b\x32\x17.google.api.FieldPolicy:O\n\x0c\x66ield_policy\x12\x1d.google.protobuf.FieldOptions\x18\xe8\xce\xc1K \x01(\x0b\x32\x17.google.api.FieldPolicy:R\n\rmethod_policy\x12\x1e.google.protobuf.MethodOptions\x18\xb5\x97\x99M \x01(\x0b\x32\x18.google.api.MethodPolicyBp\n\x0e\x63om.google.apiB\x0bPolicyProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xf8\x01\x01\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+FIELD_POLICY_FIELD_NUMBER = 158361448
+field_policy = DESCRIPTOR.extensions_by_name["field_policy"]
+METHOD_POLICY_FIELD_NUMBER = 161893301
+method_policy = DESCRIPTOR.extensions_by_name["method_policy"]
+
+_FIELDPOLICY = DESCRIPTOR.message_types_by_name["FieldPolicy"]
+_METHODPOLICY = DESCRIPTOR.message_types_by_name["MethodPolicy"]
+FieldPolicy = _reflection.GeneratedProtocolMessageType(
+ "FieldPolicy",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _FIELDPOLICY,
+ "__module__": "google.api.policy_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.FieldPolicy)
+ },
+)
+_sym_db.RegisterMessage(FieldPolicy)
+
+MethodPolicy = _reflection.GeneratedProtocolMessageType(
+ "MethodPolicy",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _METHODPOLICY,
+ "__module__": "google.api.policy_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.MethodPolicy)
+ },
+)
+_sym_db.RegisterMessage(MethodPolicy)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+ google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(field_policy)
+ google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(
+ method_policy
+ )
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\013PolicyProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\370\001\001\242\002\004GAPI"
+ _FIELDPOLICY._serialized_start = 73
+ _FIELDPOLICY._serialized_end = 156
+ _METHODPOLICY._serialized_start = 158
+ _METHODPOLICY._serialized_end = 241
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/quota.proto b/Lib/site-packages/google/api/quota.proto
new file mode 100644
index 0000000..7ccc102
--- /dev/null
+++ b/Lib/site-packages/google/api/quota.proto
@@ -0,0 +1,184 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig";
+option java_multiple_files = true;
+option java_outer_classname = "QuotaProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// Quota configuration helps to achieve fairness and budgeting in service
+// usage.
+//
+// The metric based quota configuration works this way:
+// - The service configuration defines a set of metrics.
+// - For API calls, the quota.metric_rules maps methods to metrics with
+// corresponding costs.
+// - The quota.limits defines limits on the metrics, which will be used for
+// quota checks at runtime.
+//
+// An example quota configuration in yaml format:
+//
+// quota:
+// limits:
+//
+// - name: apiWriteQpsPerProject
+// metric: library.googleapis.com/write_calls
+// unit: "1/min/{project}" # rate limit for consumer projects
+// values:
+// STANDARD: 10000
+//
+//
+// (The metric rules bind all methods to the read_calls metric,
+// except for the UpdateBook and DeleteBook methods. These two methods
+// are mapped to the write_calls metric, with the UpdateBook method
+// consuming at twice the rate of the DeleteBook method.)
+// metric_rules:
+// - selector: "*"
+// metric_costs:
+// library.googleapis.com/read_calls: 1
+// - selector: google.example.library.v1.LibraryService.UpdateBook
+// metric_costs:
+// library.googleapis.com/write_calls: 2
+// - selector: google.example.library.v1.LibraryService.DeleteBook
+// metric_costs:
+// library.googleapis.com/write_calls: 1
+//
+// Corresponding Metric definition:
+//
+// metrics:
+// - name: library.googleapis.com/read_calls
+// display_name: Read requests
+// metric_kind: DELTA
+// value_type: INT64
+//
+// - name: library.googleapis.com/write_calls
+// display_name: Write requests
+// metric_kind: DELTA
+// value_type: INT64
+//
+//
+message Quota {
+ // List of QuotaLimit definitions for the service.
+ repeated QuotaLimit limits = 3;
+
+ // List of MetricRule definitions, each one mapping a selected method to one
+ // or more metrics.
+ repeated MetricRule metric_rules = 4;
+}
+
+// Bind API methods to metrics. Binding a method to a metric causes that
+// metric's configured quota behaviors to apply to the method call.
+message MetricRule {
+ // Selects the methods to which this rule applies.
+ //
+ // Refer to [selector][google.api.DocumentationRule.selector] for syntax
+ // details.
+ string selector = 1;
+
+ // Metrics to update when the selected methods are called, and the associated
+ // cost applied to each metric.
+ //
+ // The key of the map is the metric name, and the values are the amount
+ // increased for the metric against which the quota limits are defined.
+ // The value must not be negative.
+ map<string, int64> metric_costs = 2;
+}
+
+// `QuotaLimit` defines a specific limit that applies over a specified duration
+// for a limit type. There can be at most one limit for a duration and limit
+// type combination defined within a `QuotaGroup`.
+message QuotaLimit {
+ // Name of the quota limit.
+ //
+ // The name must be provided, and it must be unique within the service. The
+ // name can only include alphanumeric characters as well as '-'.
+ //
+ // The maximum length of the limit name is 64 characters.
+ string name = 6;
+
+ // Optional. User-visible, extended description for this quota limit.
+ // Should be used only when more context is needed to understand this limit
+ // than provided by the limit's display name (see: `display_name`).
+ string description = 2;
+
+ // Default number of tokens that can be consumed during the specified
+ // duration. This is the number of tokens assigned when a client
+ // application developer activates the service for their project.
+ //
+ // Specifying a value of 0 will block all requests. This can be used if you
+ // are provisioning quota to selected consumers and blocking others.
+ // Similarly, a value of -1 will indicate an unlimited quota. No other
+ // negative values are allowed.
+ //
+ // Used by group-based quotas only.
+ int64 default_limit = 3;
+
+ // Maximum number of tokens that can be consumed during the specified
+ // duration. Client application developers can override the default limit up
+ // to this maximum. If specified, this value cannot be set to a value less
+ // than the default limit. If not specified, it is set to the default limit.
+ //
+ // To allow clients to apply overrides with no upper bound, set this to -1,
+ // indicating unlimited maximum quota.
+ //
+ // Used by group-based quotas only.
+ int64 max_limit = 4;
+
+ // Free tier value displayed in the Developers Console for this limit.
+ // The free tier is the number of tokens that will be subtracted from the
+ // billed amount when billing is enabled.
+ // This field can only be set on a limit with duration "1d", in a billable
+ // group; it is invalid on any other limit. If this field is not set, it
+ // defaults to 0, indicating that there is no free tier for this service.
+ //
+ // Used by group-based quotas only.
+ int64 free_tier = 7;
+
+ // Duration of this limit in textual notation. Must be "100s" or "1d".
+ //
+ // Used by group-based quotas only.
+ string duration = 5;
+
+ // The name of the metric this quota limit applies to. The quota limits with
+ // the same metric will be checked together during runtime. The metric must be
+ // defined within the service config.
+ string metric = 8;
+
+ // Specify the unit of the quota limit. It uses the same syntax as
+ // [Metric.unit][]. The supported unit kinds are determined by the quota
+ // backend system.
+ //
+ // Here are some examples:
+ // * "1/min/{project}" for quota per minute per project.
+ //
+ // Note: the order of unit components is insignificant.
+ // The "1" at the beginning is required to follow the metric unit syntax.
+ string unit = 9;
+
+ // Tiered limit values. You must specify this as a key:value pair, with an
+ // integer value that is the maximum number of requests allowed for the
+ // specified unit. Currently only STANDARD is supported.
+ map<string, int64> values = 10;
+
+ // User-visible display name for this limit.
+ // Optional. If not set, the UI will provide a default display name based on
+ // the quota configuration. This field can be used to override the default
+ // display name generated from the configuration.
+ string display_name = 12;
+}
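The yaml example above maps directly onto the generated `quota_pb2` messages. A sketch of the same configuration built in Python; the limit, metric, and selector values are taken from the example above:

```python
# Sketch mirroring the yaml example in the Quota comment.
from google.api import quota_pb2

quota = quota_pb2.Quota()

# The apiWriteQpsPerProject limit from the yaml example.
limit = quota.limits.add(
    name="apiWriteQpsPerProject",
    metric="library.googleapis.com/write_calls",
    unit="1/min/{project}",
)
limit.values["STANDARD"] = 10000

# MetricRule binding all methods to read_calls at a cost of 1.
rule = quota.metric_rules.add(selector="*")
rule.metric_costs["library.googleapis.com/read_calls"] = 1
```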
diff --git a/Lib/site-packages/google/api/quota_pb2.py b/Lib/site-packages/google/api/quota_pb2.py
new file mode 100644
index 0000000..7a83032
--- /dev/null
+++ b/Lib/site-packages/google/api/quota_pb2.py
@@ -0,0 +1,112 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/quota.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x16google/api/quota.proto\x12\ngoogle.api"]\n\x05Quota\x12&\n\x06limits\x18\x03 \x03(\x0b\x32\x16.google.api.QuotaLimit\x12,\n\x0cmetric_rules\x18\x04 \x03(\x0b\x32\x16.google.api.MetricRule"\x91\x01\n\nMetricRule\x12\x10\n\x08selector\x18\x01 \x01(\t\x12=\n\x0cmetric_costs\x18\x02 \x03(\x0b\x32\'.google.api.MetricRule.MetricCostsEntry\x1a\x32\n\x10MetricCostsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x95\x02\n\nQuotaLimit\x12\x0c\n\x04name\x18\x06 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x15\n\rdefault_limit\x18\x03 \x01(\x03\x12\x11\n\tmax_limit\x18\x04 \x01(\x03\x12\x11\n\tfree_tier\x18\x07 \x01(\x03\x12\x10\n\x08\x64uration\x18\x05 \x01(\t\x12\x0e\n\x06metric\x18\x08 \x01(\t\x12\x0c\n\x04unit\x18\t \x01(\t\x12\x32\n\x06values\x18\n \x03(\x0b\x32".google.api.QuotaLimit.ValuesEntry\x12\x14\n\x0c\x64isplay_name\x18\x0c \x01(\t\x1a-\n\x0bValuesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x42l\n\x0e\x63om.google.apiB\nQuotaProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_QUOTA = DESCRIPTOR.message_types_by_name["Quota"]
+_METRICRULE = DESCRIPTOR.message_types_by_name["MetricRule"]
+_METRICRULE_METRICCOSTSENTRY = _METRICRULE.nested_types_by_name["MetricCostsEntry"]
+_QUOTALIMIT = DESCRIPTOR.message_types_by_name["QuotaLimit"]
+_QUOTALIMIT_VALUESENTRY = _QUOTALIMIT.nested_types_by_name["ValuesEntry"]
+Quota = _reflection.GeneratedProtocolMessageType(
+ "Quota",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _QUOTA,
+ "__module__": "google.api.quota_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Quota)
+ },
+)
+_sym_db.RegisterMessage(Quota)
+
+MetricRule = _reflection.GeneratedProtocolMessageType(
+ "MetricRule",
+ (_message.Message,),
+ {
+ "MetricCostsEntry": _reflection.GeneratedProtocolMessageType(
+ "MetricCostsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _METRICRULE_METRICCOSTSENTRY,
+ "__module__": "google.api.quota_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.MetricRule.MetricCostsEntry)
+ },
+ ),
+ "DESCRIPTOR": _METRICRULE,
+ "__module__": "google.api.quota_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.MetricRule)
+ },
+)
+_sym_db.RegisterMessage(MetricRule)
+_sym_db.RegisterMessage(MetricRule.MetricCostsEntry)
+
+QuotaLimit = _reflection.GeneratedProtocolMessageType(
+ "QuotaLimit",
+ (_message.Message,),
+ {
+ "ValuesEntry": _reflection.GeneratedProtocolMessageType(
+ "ValuesEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _QUOTALIMIT_VALUESENTRY,
+ "__module__": "google.api.quota_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.QuotaLimit.ValuesEntry)
+ },
+ ),
+ "DESCRIPTOR": _QUOTALIMIT,
+ "__module__": "google.api.quota_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.QuotaLimit)
+ },
+)
+_sym_db.RegisterMessage(QuotaLimit)
+_sym_db.RegisterMessage(QuotaLimit.ValuesEntry)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\nQuotaProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\242\002\004GAPI"
+ _METRICRULE_METRICCOSTSENTRY._options = None
+ _METRICRULE_METRICCOSTSENTRY._serialized_options = b"8\001"
+ _QUOTALIMIT_VALUESENTRY._options = None
+ _QUOTALIMIT_VALUESENTRY._serialized_options = b"8\001"
+ _QUOTA._serialized_start = 38
+ _QUOTA._serialized_end = 131
+ _METRICRULE._serialized_start = 134
+ _METRICRULE._serialized_end = 279
+ _METRICRULE_METRICCOSTSENTRY._serialized_start = 229
+ _METRICRULE_METRICCOSTSENTRY._serialized_end = 279
+ _QUOTALIMIT._serialized_start = 282
+ _QUOTALIMIT._serialized_end = 559
+ _QUOTALIMIT_VALUESENTRY._serialized_start = 514
+ _QUOTALIMIT_VALUESENTRY._serialized_end = 559
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/resource.proto b/Lib/site-packages/google/api/resource.proto
new file mode 100644
index 0000000..bf0cbec
--- /dev/null
+++ b/Lib/site-packages/google/api/resource.proto
@@ -0,0 +1,238 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/protobuf/descriptor.proto";
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
+option java_multiple_files = true;
+option java_outer_classname = "ResourceProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+extend google.protobuf.FieldOptions {
+ // An annotation that describes a resource reference, see
+ // [ResourceReference][].
+ google.api.ResourceReference resource_reference = 1055;
+}
+
+extend google.protobuf.FileOptions {
+ // An annotation that describes a resource definition without a corresponding
+ // message; see [ResourceDescriptor][].
+ repeated google.api.ResourceDescriptor resource_definition = 1053;
+}
+
+extend google.protobuf.MessageOptions {
+ // An annotation that describes a resource definition, see
+ // [ResourceDescriptor][].
+ google.api.ResourceDescriptor resource = 1053;
+}
+
+// A simple descriptor of a resource type.
+//
+// ResourceDescriptor annotates a resource message (either by means of a
+// protobuf annotation or use in the service config), and associates the
+// resource's schema, the resource type, and the pattern of the resource name.
+//
+// Example:
+//
+// message Topic {
+// // Indicates this message defines a resource schema.
+// // Declares the resource type in the format of {service}/{kind}.
+// // For Kubernetes resources, the format is {api group}/{kind}.
+// option (google.api.resource) = {
+// type: "pubsub.googleapis.com/Topic"
+// pattern: "projects/{project}/topics/{topic}"
+// };
+// }
+//
+// The ResourceDescriptor Yaml config will look like:
+//
+// resources:
+// - type: "pubsub.googleapis.com/Topic"
+// pattern: "projects/{project}/topics/{topic}"
+//
+// Sometimes, resources have multiple patterns, typically because they can
+// live under multiple parents.
+//
+// Example:
+//
+// message LogEntry {
+// option (google.api.resource) = {
+// type: "logging.googleapis.com/LogEntry"
+// pattern: "projects/{project}/logs/{log}"
+// pattern: "folders/{folder}/logs/{log}"
+// pattern: "organizations/{organization}/logs/{log}"
+// pattern: "billingAccounts/{billing_account}/logs/{log}"
+// };
+// }
+//
+// The ResourceDescriptor Yaml config will look like:
+//
+// resources:
+// - type: 'logging.googleapis.com/LogEntry'
+// pattern: "projects/{project}/logs/{log}"
+// pattern: "folders/{folder}/logs/{log}"
+// pattern: "organizations/{organization}/logs/{log}"
+// pattern: "billingAccounts/{billing_account}/logs/{log}"
+message ResourceDescriptor {
+ // A description of the historical or future-looking state of the
+ // resource pattern.
+ enum History {
+ // The "unset" value.
+ HISTORY_UNSPECIFIED = 0;
+
+ // The resource originally had one pattern and launched as such, and
+ // additional patterns were added later.
+ ORIGINALLY_SINGLE_PATTERN = 1;
+
+ // The resource has one pattern, but the API owner expects to add more
+ // later. (This is the inverse of ORIGINALLY_SINGLE_PATTERN, and prevents
+ // that from being necessary once there are multiple patterns.)
+ FUTURE_MULTI_PATTERN = 2;
+ }
+
+ // A flag representing a specific style that a resource claims to conform to.
+ enum Style {
+ // The unspecified value. Do not use.
+ STYLE_UNSPECIFIED = 0;
+
+ // This resource is intended to be "declarative-friendly".
+ //
+ // Declarative-friendly resources must be more strictly consistent, and
+ // setting this to true communicates to tools that this resource should
+ // adhere to declarative-friendly expectations.
+ //
+ // Note: This is used by the API linter (linter.aip.dev) to enable
+ // additional checks.
+ DECLARATIVE_FRIENDLY = 1;
+ }
+
+ // The resource type. It must be in the format of
+ // {service_name}/{resource_type_kind}. The `resource_type_kind` must be
+ // singular and must not include version numbers.
+ //
+ // Example: `storage.googleapis.com/Bucket`
+ //
+ // The value of the resource_type_kind must follow the regular expression
+ // /[A-Za-z][a-zA-Z0-9]+/. It should start with an upper case character and
+ // should use PascalCase (UpperCamelCase). The maximum number of
+ // characters allowed for the `resource_type_kind` is 100.
+ string type = 1;
+
+ // Optional. The relative resource name pattern associated with this resource
+ // type. The DNS prefix of the full resource name shouldn't be specified here.
+ //
+ // The path pattern must follow the syntax, which aligns with HTTP binding
+ // syntax:
+ //
+ // Template = Segment { "/" Segment } ;
+ // Segment = LITERAL | Variable ;
+ // Variable = "{" LITERAL "}" ;
+ //
+ // Examples:
+ //
+ // - "projects/{project}/topics/{topic}"
+ // - "projects/{project}/knowledgeBases/{knowledge_base}"
+ //
+ // The components in braces correspond to the IDs for each resource in the
+ // hierarchy. It is expected that, if multiple patterns are provided,
+ // the same component name (e.g. "project") refers to IDs of the same
+ // type of resource.
+ repeated string pattern = 2;
+
+ // Optional. The field on the resource that designates the resource name
+ // field. If omitted, this is assumed to be "name".
+ string name_field = 3;
+
+ // Optional. The historical or future-looking state of the resource pattern.
+ //
+ // Example:
+ //
+ // // The InspectTemplate message originally only supported resource
+ // // names with organization, and project was added later.
+ // message InspectTemplate {
+ // option (google.api.resource) = {
+ // type: "dlp.googleapis.com/InspectTemplate"
+ // pattern:
+ // "organizations/{organization}/inspectTemplates/{inspect_template}"
+ // pattern: "projects/{project}/inspectTemplates/{inspect_template}"
+ // history: ORIGINALLY_SINGLE_PATTERN
+ // };
+ // }
+ History history = 4;
+
+ // The plural name used in the resource name and permission names, such as
+ // 'projects' for the resource name of 'projects/{project}' and the permission
+ // name of 'cloudresourcemanager.googleapis.com/projects.get'. It is the same
+ // concept of the `plural` field in k8s CRD spec
+ // https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/
+ //
+ // Note: The plural form is required even for singleton resources. See
+ // https://aip.dev/156
+ string plural = 5;
+
+ // The same concept of the `singular` field in k8s CRD spec
+ // https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/
+ // Such as "project" for the `resourcemanager.googleapis.com/Project` type.
+ string singular = 6;
+
+ // Style flag(s) for this resource.
+ // These indicate that a resource is expected to conform to a given
+ // style. See the specific style flags for additional information.
+ repeated Style style = 10;
+}
+
+// Defines a proto annotation that describes a string field that refers to
+// an API resource.
+message ResourceReference {
+ // The resource type that the annotated field references.
+ //
+ // Example:
+ //
+ // message Subscription {
+ // string topic = 2 [(google.api.resource_reference) = {
+ // type: "pubsub.googleapis.com/Topic"
+ // }];
+ // }
+ //
+ // Occasionally, a field may reference an arbitrary resource. In this case,
+ // APIs use the special value * in their resource reference.
+ //
+ // Example:
+ //
+ // message GetIamPolicyRequest {
+ // string resource = 2 [(google.api.resource_reference) = {
+ // type: "*"
+ // }];
+ // }
+ string type = 1;
+
+ // The resource type of a child collection that the annotated field
+ // references. This is useful for annotating the `parent` field that
+ // doesn't have a fixed resource type.
+ //
+ // Example:
+ //
+ // message ListLogEntriesRequest {
+ // string parent = 1 [(google.api.resource_reference) = {
+ // child_type: "logging.googleapis.com/LogEntry"
+ // };
+ // }
+ string child_type = 2;
+}
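Reading these annotations back at runtime again goes through descriptor options. A sketch assuming the generated `resource_pb2` module (added below); `resource_descriptor` is a hypothetical helper, and `message_cls` is any message class compiled with a `(google.api.resource)` option:

```python
from google.api import resource_pb2

def resource_descriptor(message_cls):
    """Return the message's ResourceDescriptor annotation, or None."""
    opts = message_cls.DESCRIPTOR.GetOptions()
    if opts.HasExtension(resource_pb2.resource):
        return opts.Extensions[resource_pb2.resource]
    return None
```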
diff --git a/Lib/site-packages/google/api/resource_pb2.py b/Lib/site-packages/google/api/resource_pb2.py
new file mode 100644
index 0000000..8add678
--- /dev/null
+++ b/Lib/site-packages/google/api/resource_pb2.py
@@ -0,0 +1,91 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/resource.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x19google/api/resource.proto\x12\ngoogle.api\x1a google/protobuf/descriptor.proto"\xee\x02\n\x12ResourceDescriptor\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0f\n\x07pattern\x18\x02 \x03(\t\x12\x12\n\nname_field\x18\x03 \x01(\t\x12\x37\n\x07history\x18\x04 \x01(\x0e\x32&.google.api.ResourceDescriptor.History\x12\x0e\n\x06plural\x18\x05 \x01(\t\x12\x10\n\x08singular\x18\x06 \x01(\t\x12\x33\n\x05style\x18\n \x03(\x0e\x32$.google.api.ResourceDescriptor.Style"[\n\x07History\x12\x17\n\x13HISTORY_UNSPECIFIED\x10\x00\x12\x1d\n\x19ORIGINALLY_SINGLE_PATTERN\x10\x01\x12\x18\n\x14\x46UTURE_MULTI_PATTERN\x10\x02"8\n\x05Style\x12\x15\n\x11STYLE_UNSPECIFIED\x10\x00\x12\x18\n\x14\x44\x45\x43LARATIVE_FRIENDLY\x10\x01"5\n\x11ResourceReference\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x12\n\nchild_type\x18\x02 \x01(\t:Y\n\x12resource_reference\x12\x1d.google.protobuf.FieldOptions\x18\x9f\x08 \x01(\x0b\x32\x1d.google.api.ResourceReference:Z\n\x13resource_definition\x12\x1c.google.protobuf.FileOptions\x18\x9d\x08 \x03(\x0b\x32\x1e.google.api.ResourceDescriptor:R\n\x08resource\x12\x1f.google.protobuf.MessageOptions\x18\x9d\x08 \x01(\x0b\x32\x1e.google.api.ResourceDescriptorBn\n\x0e\x63om.google.apiB\rResourceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xf8\x01\x01\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+RESOURCE_REFERENCE_FIELD_NUMBER = 1055
+resource_reference = DESCRIPTOR.extensions_by_name["resource_reference"]
+RESOURCE_DEFINITION_FIELD_NUMBER = 1053
+resource_definition = DESCRIPTOR.extensions_by_name["resource_definition"]
+RESOURCE_FIELD_NUMBER = 1053
+resource = DESCRIPTOR.extensions_by_name["resource"]
+
+_RESOURCEDESCRIPTOR = DESCRIPTOR.message_types_by_name["ResourceDescriptor"]
+_RESOURCEREFERENCE = DESCRIPTOR.message_types_by_name["ResourceReference"]
+_RESOURCEDESCRIPTOR_HISTORY = _RESOURCEDESCRIPTOR.enum_types_by_name["History"]
+_RESOURCEDESCRIPTOR_STYLE = _RESOURCEDESCRIPTOR.enum_types_by_name["Style"]
+ResourceDescriptor = _reflection.GeneratedProtocolMessageType(
+ "ResourceDescriptor",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _RESOURCEDESCRIPTOR,
+ "__module__": "google.api.resource_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.ResourceDescriptor)
+ },
+)
+_sym_db.RegisterMessage(ResourceDescriptor)
+
+ResourceReference = _reflection.GeneratedProtocolMessageType(
+ "ResourceReference",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _RESOURCEREFERENCE,
+ "__module__": "google.api.resource_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.ResourceReference)
+ },
+)
+_sym_db.RegisterMessage(ResourceReference)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+ google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(
+ resource_reference
+ )
+ google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(
+ resource_definition
+ )
+ google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(resource)
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\rResourceProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\370\001\001\242\002\004GAPI"
+ _RESOURCEDESCRIPTOR._serialized_start = 76
+ _RESOURCEDESCRIPTOR._serialized_end = 442
+ _RESOURCEDESCRIPTOR_HISTORY._serialized_start = 293
+ _RESOURCEDESCRIPTOR_HISTORY._serialized_end = 384
+ _RESOURCEDESCRIPTOR_STYLE._serialized_start = 386
+ _RESOURCEDESCRIPTOR_STYLE._serialized_end = 442
+ _RESOURCEREFERENCE._serialized_start = 444
+ _RESOURCEREFERENCE._serialized_end = 497
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/routing.proto b/Lib/site-packages/google/api/routing.proto
new file mode 100644
index 0000000..b35289b
--- /dev/null
+++ b/Lib/site-packages/google/api/routing.proto
@@ -0,0 +1,461 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/protobuf/descriptor.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
+option java_multiple_files = true;
+option java_outer_classname = "RoutingProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+extend google.protobuf.MethodOptions {
+ // See RoutingRule.
+ google.api.RoutingRule routing = 72295729;
+}
+
+// Specifies the routing information that should be sent along with the request
+// in the form of routing header.
+// **NOTE:** All service configuration rules follow the "last one wins" order.
+//
+// The examples below will apply to an RPC which has the following request type:
+//
+// Message Definition:
+//
+// message Request {
+// // The name of the Table
+// // Values can be of the following formats:
+// // - `projects/<project>/tables/<table>`
+// // - `projects/<project>/instances/<instance>/tables/<table>`
+// // - `region/<region>/zones/<zone>/tables/<table>`
+// string table_name = 1;
+//
+// // This value specifies routing for replication.
+// // It can be in the following formats:
+// // - `profiles/<profile_id>`
+// // - a legacy `profile_id` that can be any string
+// string app_profile_id = 2;
+// }
+//
+// Example message:
+//
+// {
+// table_name: projects/proj_foo/instances/instance_bar/table/table_baz,
+// app_profile_id: profiles/prof_qux
+// }
+//
+// The routing header consists of one or multiple key-value pairs. Every key
+// and value must be percent-encoded, and joined together in the format of
+// `key1=value1&key2=value2`.
+// In the examples below, the percent-encoding is skipped for readability.
+//
+// Example 1
+//
+// Extracting a field from the request to put into the routing header
+// unchanged, with the key equal to the field name.
+//
+// annotation:
+//
+// option (google.api.routing) = {
+// // Take the `app_profile_id`.
+// routing_parameters {
+// field: "app_profile_id"
+// }
+// };
+//
+// result:
+//
+// x-goog-request-params: app_profile_id=profiles/prof_qux
+//
+// Example 2
+//
+// Extracting a field from the request to put into the routing header
+// unchanged, with the key different from the field name.
+//
+// annotation:
+//
+// option (google.api.routing) = {
+// // Take the `app_profile_id`, but name it `routing_id` in the header.
+// routing_parameters {
+// field: "app_profile_id"
+// path_template: "{routing_id=**}"
+// }
+// };
+//
+// result:
+//
+// x-goog-request-params: routing_id=profiles/prof_qux
+//
+// Example 3
+//
+// Extracting a field from the request to put into the routing
+// header, while matching a path template syntax on the field's value.
+//
+// NB: it is more useful to send nothing than to send garbage for the purpose
+// of dynamic routing, since garbage pollutes the cache. Thus the matching.
+//
+// Sub-example 3a
+//
+// The field matches the template.
+//
+// annotation:
+//
+// option (google.api.routing) = {
+// // Take the `table_name`, if it's well-formed (with project-based
+// // syntax).
+// routing_parameters {
+// field: "table_name"
+// path_template: "{table_name=projects/*/instances/*/**}"
+// }
+// };
+//
+// result:
+//
+// x-goog-request-params:
+// table_name=projects/proj_foo/instances/instance_bar/table/table_baz
+//
+// Sub-example 3b
+//
+// The field does not match the template.
+//
+// annotation:
+//
+// option (google.api.routing) = {
+// // Take the `table_name`, if it's well-formed (with region-based
+// // syntax).
+// routing_parameters {
+// field: "table_name"
+// path_template: "{table_name=regions/*/zones/*/**}"
+// }
+// };
+//
+// result:
+//
+//     <no routing header will be sent>
+// Sub-example 3c
+//
+// Multiple alternative conflictingly named path templates are
+// specified. The one that matches is used to construct the header.
+//
+// annotation:
+//
+// option (google.api.routing) = {
+// // Take the `table_name`, if it's well-formed, whether
+// // using the region- or projects-based syntax.
+//
+// routing_parameters {
+// field: "table_name"
+// path_template: "{table_name=regions/*/zones/*/**}"
+// }
+// routing_parameters {
+// field: "table_name"
+// path_template: "{table_name=projects/*/instances/*/**}"
+// }
+// };
+//
+// result:
+//
+// x-goog-request-params:
+// table_name=projects/proj_foo/instances/instance_bar/table/table_baz
+//
+// Example 4
+//
+// Extracting a single routing header key-value pair by matching a
+// template syntax on (a part of) a single request field.
+//
+// annotation:
+//
+// option (google.api.routing) = {
+// // Take just the project id from the `table_name` field.
+// routing_parameters {
+// field: "table_name"
+// path_template: "{routing_id=projects/*}/**"
+// }
+// };
+//
+// result:
+//
+// x-goog-request-params: routing_id=projects/proj_foo
+//
+// Example 5
+//
+// Extracting a single routing header key-value pair by matching
+// several conflictingly named path templates on (parts of) a single request
+// field. The last template to match "wins" the conflict.
+//
+// annotation:
+//
+// option (google.api.routing) = {
+// // If the `table_name` does not have instances information,
+// // take just the project id for routing.
+// // Otherwise take project + instance.
+//
+// routing_parameters {
+// field: "table_name"
+// path_template: "{routing_id=projects/*}/**"
+// }
+// routing_parameters {
+// field: "table_name"
+// path_template: "{routing_id=projects/*/instances/*}/**"
+// }
+// };
+//
+// result:
+//
+// x-goog-request-params:
+// routing_id=projects/proj_foo/instances/instance_bar
+//
+// Example 6
+//
+// Extracting multiple routing header key-value pairs by matching
+// several non-conflicting path templates on (parts of) a single request field.
+//
+// Sub-example 6a
+//
+// Make the templates strict, so that if the `table_name` does not
+// contain instance information, nothing is sent.
+//
+// annotation:
+//
+// option (google.api.routing) = {
+// // The routing code needs two keys instead of one composite
+// // but works only for the tables with the "project-instance" name
+// // syntax.
+//
+// routing_parameters {
+// field: "table_name"
+// path_template: "{project_id=projects/*}/instances/*/**"
+// }
+// routing_parameters {
+// field: "table_name"
+// path_template: "projects/*/{instance_id=instances/*}/**"
+// }
+// };
+//
+// result:
+//
+// x-goog-request-params:
+// project_id=projects/proj_foo&instance_id=instances/instance_bar
+//
+// Sub-example 6b
+//
+// Make the templates loose, so that if the `table_name` does not
+// contain instance information, just the project id part is sent.
+//
+// annotation:
+//
+// option (google.api.routing) = {
+// // The routing code wants two keys instead of one composite
+// // but will work with just the `project_id` for tables without
+// // an instance in the `table_name`.
+//
+// routing_parameters {
+// field: "table_name"
+// path_template: "{project_id=projects/*}/**"
+// }
+// routing_parameters {
+// field: "table_name"
+// path_template: "projects/*/{instance_id=instances/*}/**"
+// }
+// };
+//
+// result (is the same as 6a for our example message because it has the instance
+// information):
+//
+// x-goog-request-params:
+// project_id=projects/proj_foo&instance_id=instances/instance_bar
+//
+// Example 7
+//
+// Extracting multiple routing header key-value pairs by matching
+// several path templates on multiple request fields.
+//
+// NB: note that here there is no way to specify sending nothing if one of the
+// fields does not match its template. E.g. if the `table_name` is in the wrong
+// format, the `project_id` will not be sent, but the `routing_id` will be.
+// The backend routing code has to be aware of that and be prepared to not
+// receive a full complement of keys if it expects multiple.
+//
+// annotation:
+//
+// option (google.api.routing) = {
+// // The routing needs both `project_id` and `routing_id`
+// // (from the `app_profile_id` field) for routing.
+//
+// routing_parameters {
+// field: "table_name"
+// path_template: "{project_id=projects/*}/**"
+// }
+// routing_parameters {
+// field: "app_profile_id"
+// path_template: "{routing_id=**}"
+// }
+// };
+//
+// result:
+//
+// x-goog-request-params:
+// project_id=projects/proj_foo&routing_id=profiles/prof_qux
+//
+// Example 8
+//
+// Extracting a single routing header key-value pair by matching
+// several conflictingly named path templates on several request fields. The
+// last template to match "wins" the conflict.
+//
+// annotation:
+//
+// option (google.api.routing) = {
+// // The `routing_id` can be a project id or a region id depending on
+// // the table name format, but only if the `app_profile_id` is not set.
+// // If `app_profile_id` is set it should be used instead.
+//
+// routing_parameters {
+// field: "table_name"
+// path_template: "{routing_id=projects/*}/**"
+// }
+// routing_parameters {
+// field: "table_name"
+// path_template: "{routing_id=regions/*}/**"
+// }
+// routing_parameters {
+// field: "app_profile_id"
+// path_template: "{routing_id=**}"
+// }
+// };
+//
+// result:
+//
+// x-goog-request-params: routing_id=profiles/prof_qux
+//
+// Example 9
+//
+// Bringing it all together.
+//
+// annotation:
+//
+// option (google.api.routing) = {
+// // For routing both `table_location` and a `routing_id` are needed.
+// //
+// // table_location can be either an instance id or a region+zone id.
+// //
+// // For `routing_id`, take the value of `app_profile_id`
+// // - If it's in the format `profiles/<profile_id>`, send
+// // just the `<profile_id>` part.
+// // - If it's any other literal, send it as is.
+// // If the `app_profile_id` is empty, and the `table_name` starts with
+// // the project_id, send that instead.
+//
+// routing_parameters {
+// field: "table_name"
+// path_template: "projects/*/{table_location=instances/*}/tables/*"
+// }
+// routing_parameters {
+// field: "table_name"
+// path_template: "{table_location=regions/*/zones/*}/tables/*"
+// }
+// routing_parameters {
+// field: "table_name"
+// path_template: "{routing_id=projects/*}/**"
+// }
+// routing_parameters {
+// field: "app_profile_id"
+// path_template: "{routing_id=**}"
+// }
+// routing_parameters {
+// field: "app_profile_id"
+// path_template: "profiles/{routing_id=*}"
+// }
+// };
+//
+// result:
+//
+// x-goog-request-params:
+// table_location=instances/instance_bar&routing_id=prof_qux
+message RoutingRule {
+ // A collection of Routing Parameter specifications.
+ // **NOTE:** If multiple Routing Parameters describe the same key
+ // (via the `path_template` field or via the `field` field when
+ // `path_template` is not provided), "last one wins" rule
+ // determines which Parameter gets used.
+ // See the examples for more details.
+ repeated RoutingParameter routing_parameters = 2;
+}
+
+// A projection from an input message to the GRPC or REST header.
+message RoutingParameter {
+ // A request field to extract the header key-value pair from.
+ string field = 1;
+
+ // A pattern matching the key-value field. Optional.
+ // If not specified, the whole field specified in the `field` field will be
+ // taken as value, and its name used as key. If specified, it MUST contain
+ // exactly one named segment (along with any number of unnamed segments). The
+ // pattern will be matched over the field specified in the `field` field, then
+ // if the match is successful:
+ // - the name of the single named segment will be used as a header name,
+ // - the match value of the segment will be used as a header value;
+ // if the match is NOT successful, nothing will be sent.
+ //
+ // Example:
+ //
+ // -- This is a field in the request message
+ // | that the header value will be extracted from.
+ // |
+ // | -- This is the key name in the
+ // | | routing header.
+ // V |
+ // field: "table_name" v
+ // path_template: "projects/*/{table_location=instances/*}/tables/*"
+ // ^ ^
+ // | |
+ // In the {} brackets is the pattern that -- |
+ // specifies what to extract from the |
+ // field as a value to be sent. |
+ // |
+ // The string in the field must match the whole pattern --
+ // before brackets, inside brackets, after brackets.
+ //
+ // When looking at this specific example, we can see that:
+ // - A key-value pair with the key `table_location`
+ // and the value matching `instances/*` should be added
+ // to the x-goog-request-params routing header.
+ // - The value is extracted from the request message's `table_name` field
+ // if it matches the full pattern specified:
+ // `projects/*/instances/*/tables/*`.
+ //
+ // **NB:** If the `path_template` field is not provided, the key name is
+ // equal to the field name, and the whole field should be sent as a value.
+ // This makes the pattern for the field and the value functionally equivalent
+ // to `**`, and the configuration
+ //
+ // {
+ // field: "table_name"
+ // }
+ //
+ // is a functionally equivalent shorthand to:
+ //
+ // {
+ // field: "table_name"
+ // path_template: "{table_name=**}"
+ // }
+ //
+ // See Example 1 for more details.
+ string path_template = 2;
+}
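The header format itself is simple even though the template matching is not: each extracted key and value is percent-encoded and the pairs are joined with `&`. A sketch of only that final assembly step; `routing_header` is a hypothetical helper and the path-template matcher is out of scope here:

```python
from urllib.parse import quote

def routing_header(params):
    """Join extracted key/value pairs into an x-goog-request-params value."""
    return "&".join(
        f"{quote(key, safe='')}={quote(value, safe='')}"
        for key, value in params.items()
    )

# routing_header({"routing_id": "profiles/prof_qux"})
# -> 'routing_id=profiles%2Fprof_qux'
```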
diff --git a/Lib/site-packages/google/api/routing_pb2.py b/Lib/site-packages/google/api/routing_pb2.py
new file mode 100644
index 0000000..4af8c0d
--- /dev/null
+++ b/Lib/site-packages/google/api/routing_pb2.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/routing.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x18google/api/routing.proto\x12\ngoogle.api\x1a google/protobuf/descriptor.proto"G\n\x0bRoutingRule\x12\x38\n\x12routing_parameters\x18\x02 \x03(\x0b\x32\x1c.google.api.RoutingParameter"8\n\x10RoutingParameter\x12\r\n\x05\x66ield\x18\x01 \x01(\t\x12\x15\n\rpath_template\x18\x02 \x01(\t:K\n\x07routing\x12\x1e.google.protobuf.MethodOptions\x18\xb1\xca\xbc" \x01(\x0b\x32\x17.google.api.RoutingRuleBj\n\x0e\x63om.google.apiB\x0cRoutingProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+ROUTING_FIELD_NUMBER = 72295729
+routing = DESCRIPTOR.extensions_by_name["routing"]
+
+_ROUTINGRULE = DESCRIPTOR.message_types_by_name["RoutingRule"]
+_ROUTINGPARAMETER = DESCRIPTOR.message_types_by_name["RoutingParameter"]
+RoutingRule = _reflection.GeneratedProtocolMessageType(
+ "RoutingRule",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ROUTINGRULE,
+ "__module__": "google.api.routing_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.RoutingRule)
+ },
+)
+_sym_db.RegisterMessage(RoutingRule)
+
+RoutingParameter = _reflection.GeneratedProtocolMessageType(
+ "RoutingParameter",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ROUTINGPARAMETER,
+ "__module__": "google.api.routing_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.RoutingParameter)
+ },
+)
+_sym_db.RegisterMessage(RoutingParameter)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+ google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(routing)
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\014RoutingProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\242\002\004GAPI"
+ _ROUTINGRULE._serialized_start = 74
+ _ROUTINGRULE._serialized_end = 145
+ _ROUTINGPARAMETER._serialized_start = 147
+ _ROUTINGPARAMETER._serialized_end = 203
+# @@protoc_insertion_point(module_scope)
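As with the other annotations in this package, the `routing` extension is read back from a method descriptor's options. A hedged sketch; `routing_rules` is a hypothetical helper, and `service_descriptor` is any `ServiceDescriptor` whose methods carry the `(google.api.routing)` option:

```python
from google.api import routing_pb2

def routing_rules(service_descriptor):
    """Yield (method name, RoutingRule) for every annotated method."""
    for method in service_descriptor.methods:
        opts = method.GetOptions()
        if opts.HasExtension(routing_pb2.routing):
            yield method.name, opts.Extensions[routing_pb2.routing]
```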
diff --git a/Lib/site-packages/google/api/service.proto b/Lib/site-packages/google/api/service.proto
new file mode 100644
index 0000000..3de5b66
--- /dev/null
+++ b/Lib/site-packages/google/api/service.proto
@@ -0,0 +1,191 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/api/auth.proto";
+import "google/api/backend.proto";
+import "google/api/billing.proto";
+import "google/api/client.proto";
+import "google/api/context.proto";
+import "google/api/control.proto";
+import "google/api/documentation.proto";
+import "google/api/endpoint.proto";
+import "google/api/http.proto";
+import "google/api/log.proto";
+import "google/api/logging.proto";
+import "google/api/metric.proto";
+import "google/api/monitored_resource.proto";
+import "google/api/monitoring.proto";
+import "google/api/quota.proto";
+import "google/api/source_info.proto";
+import "google/api/system_parameter.proto";
+import "google/api/usage.proto";
+import "google/protobuf/api.proto";
+import "google/protobuf/type.proto";
+import "google/protobuf/wrappers.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig";
+option java_multiple_files = true;
+option java_outer_classname = "ServiceProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// `Service` is the root object of Google API service configuration (service
+// config). It describes the basic information about a logical service,
+// such as the service name and the user-facing title, and delegates other
+// aspects to sub-sections. Each sub-section is either a proto message or a
+// repeated proto message that configures a specific aspect, such as auth.
+// For more information, see each proto message definition.
+//
+// Example:
+//
+// type: google.api.Service
+// name: calendar.googleapis.com
+// title: Google Calendar API
+// apis:
+// - name: google.calendar.v3.Calendar
+//
+// visibility:
+// rules:
+// - selector: "google.calendar.v3.*"
+// restriction: PREVIEW
+// backend:
+// rules:
+// - selector: "google.calendar.v3.*"
+// address: calendar.example.com
+//
+// authentication:
+// providers:
+// - id: google_calendar_auth
+// jwks_uri: https://www.googleapis.com/oauth2/v1/certs
+// issuer: https://securetoken.google.com
+// rules:
+// - selector: "*"
+// requirements:
+// provider_id: google_calendar_auth
+message Service {
+ // The service name, which is a DNS-like logical identifier for the
+ // service, such as `calendar.googleapis.com`. The service name
+ // typically goes through DNS verification to make sure the owner
+ // of the service also owns the DNS name.
+ string name = 1;
+
+ // The product title for this service; it is the name displayed in Google
+ // Cloud Console.
+ string title = 2;
+
+ // The Google project that owns this service.
+ string producer_project_id = 22;
+
+ // A unique ID for a specific instance of this message, typically assigned
+ // by the client for tracking purposes. Must be no longer than 63 characters
+ // and only lower case letters, digits, '.', '_' and '-' are allowed. If
+ // empty, the server may choose to generate one instead.
+ string id = 33;
+
+ // A list of API interfaces exported by this service. Only the `name` field
+ // of the [google.protobuf.Api][google.protobuf.Api] needs to be provided by
+ // the configuration author, as the remaining fields will be derived from the
+ // IDL during the normalization process. It is an error to specify an API
+ // interface here which cannot be resolved against the associated IDL files.
+ repeated google.protobuf.Api apis = 3;
+
+ // A list of all proto message types included in this API service.
+ // Types referenced directly or indirectly by the `apis` are automatically
+ // included. Messages which are not referenced but shall be included, such as
+ // types used by the `google.protobuf.Any` type, should be listed here by
+ // name by the configuration author. Example:
+ //
+ // types:
+ // - name: google.protobuf.Int32
+ repeated google.protobuf.Type types = 4;
+
+ // A list of all enum types included in this API service. Enums referenced
+ // directly or indirectly by the `apis` are automatically included. Enums
+ // which are not referenced but shall be included should be listed here by
+ // name by the configuration author. Example:
+ //
+ // enums:
+ // - name: google.someapi.v1.SomeEnum
+ repeated google.protobuf.Enum enums = 5;
+
+ // Additional API documentation.
+ Documentation documentation = 6;
+
+ // API backend configuration.
+ Backend backend = 8;
+
+ // HTTP configuration.
+ Http http = 9;
+
+ // Quota configuration.
+ Quota quota = 10;
+
+ // Auth configuration.
+ Authentication authentication = 11;
+
+ // Context configuration.
+ Context context = 12;
+
+ // Configuration controlling usage of this service.
+ Usage usage = 15;
+
+ // Configuration for network endpoints. If this is empty, then an endpoint
+ // with the same name as the service is automatically generated to service all
+ // defined APIs.
+ repeated Endpoint endpoints = 18;
+
+ // Configuration for the service control plane.
+ Control control = 21;
+
+ // Defines the logs used by this service.
+ repeated LogDescriptor logs = 23;
+
+ // Defines the metrics used by this service.
+ repeated MetricDescriptor metrics = 24;
+
+ // Defines the monitored resources used by this service. This is required
+ // by the [Service.monitoring][google.api.Service.monitoring] and
+ // [Service.logging][google.api.Service.logging] configurations.
+ repeated MonitoredResourceDescriptor monitored_resources = 25;
+
+ // Billing configuration.
+ Billing billing = 26;
+
+ // Logging configuration.
+ Logging logging = 27;
+
+ // Monitoring configuration.
+ Monitoring monitoring = 28;
+
+ // System parameter configuration.
+ SystemParameters system_parameters = 29;
+
+ // Output only. The source information for this configuration if available.
+ SourceInfo source_info = 37;
+
+ // Settings for [Google Cloud Client
+ // libraries](https://cloud.google.com/apis/docs/cloud-client-libraries)
+ // generated from APIs defined as protocol buffers.
+ Publishing publishing = 45;
+
+ // Obsolete. Do not use.
+ //
+ // This field has no semantic meaning. The service config compiler always
+ // sets this field to `3`.
+ google.protobuf.UInt32Value config_version = 20;
+}
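A sketch mirroring the yaml example above with the generated `service_pb2` module (added below in this diff); the calendar names come from that example, and `config_version` is pinned to `3` as the comment on that field describes:

```python
from google.api import service_pb2
from google.protobuf import wrappers_pb2

svc = service_pb2.Service(
    name="calendar.googleapis.com",
    title="Google Calendar API",
    config_version=wrappers_pb2.UInt32Value(value=3),
)
svc.apis.add(name="google.calendar.v3.Calendar")
```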
diff --git a/Lib/site-packages/google/api/service_pb2.py b/Lib/site-packages/google/api/service_pb2.py
new file mode 100644
index 0000000..9073b8c
--- /dev/null
+++ b/Lib/site-packages/google/api/service_pb2.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/service.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import auth_pb2 as google_dot_api_dot_auth__pb2
+from google.api import backend_pb2 as google_dot_api_dot_backend__pb2
+from google.api import billing_pb2 as google_dot_api_dot_billing__pb2
+from google.api import client_pb2 as google_dot_api_dot_client__pb2
+from google.api import context_pb2 as google_dot_api_dot_context__pb2
+from google.api import control_pb2 as google_dot_api_dot_control__pb2
+from google.api import documentation_pb2 as google_dot_api_dot_documentation__pb2
+from google.api import endpoint_pb2 as google_dot_api_dot_endpoint__pb2
+from google.api import http_pb2 as google_dot_api_dot_http__pb2
+from google.api import log_pb2 as google_dot_api_dot_log__pb2
+from google.api import logging_pb2 as google_dot_api_dot_logging__pb2
+from google.api import metric_pb2 as google_dot_api_dot_metric__pb2
+from google.api import (
+ monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2,
+)
+from google.api import monitoring_pb2 as google_dot_api_dot_monitoring__pb2
+from google.api import quota_pb2 as google_dot_api_dot_quota__pb2
+from google.api import source_info_pb2 as google_dot_api_dot_source__info__pb2
+from google.api import system_parameter_pb2 as google_dot_api_dot_system__parameter__pb2
+from google.api import usage_pb2 as google_dot_api_dot_usage__pb2
+from google.protobuf import api_pb2 as google_dot_protobuf_dot_api__pb2
+from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2
+from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b"\n\x18google/api/service.proto\x12\ngoogle.api\x1a\x15google/api/auth.proto\x1a\x18google/api/backend.proto\x1a\x18google/api/billing.proto\x1a\x17google/api/client.proto\x1a\x18google/api/context.proto\x1a\x18google/api/control.proto\x1a\x1egoogle/api/documentation.proto\x1a\x19google/api/endpoint.proto\x1a\x15google/api/http.proto\x1a\x14google/api/log.proto\x1a\x18google/api/logging.proto\x1a\x17google/api/metric.proto\x1a#google/api/monitored_resource.proto\x1a\x1bgoogle/api/monitoring.proto\x1a\x16google/api/quota.proto\x1a\x1cgoogle/api/source_info.proto\x1a!google/api/system_parameter.proto\x1a\x16google/api/usage.proto\x1a\x19google/protobuf/api.proto\x1a\x1agoogle/protobuf/type.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\x82\x08\n\x07Service\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x1b\n\x13producer_project_id\x18\x16 \x01(\t\x12\n\n\x02id\x18! \x01(\t\x12\"\n\x04\x61pis\x18\x03 \x03(\x0b\x32\x14.google.protobuf.Api\x12$\n\x05types\x18\x04 \x03(\x0b\x32\x15.google.protobuf.Type\x12$\n\x05\x65nums\x18\x05 \x03(\x0b\x32\x15.google.protobuf.Enum\x12\x30\n\rdocumentation\x18\x06 \x01(\x0b\x32\x19.google.api.Documentation\x12$\n\x07\x62\x61\x63kend\x18\x08 \x01(\x0b\x32\x13.google.api.Backend\x12\x1e\n\x04http\x18\t \x01(\x0b\x32\x10.google.api.Http\x12 \n\x05quota\x18\n \x01(\x0b\x32\x11.google.api.Quota\x12\x32\n\x0e\x61uthentication\x18\x0b \x01(\x0b\x32\x1a.google.api.Authentication\x12$\n\x07\x63ontext\x18\x0c \x01(\x0b\x32\x13.google.api.Context\x12 \n\x05usage\x18\x0f \x01(\x0b\x32\x11.google.api.Usage\x12'\n\tendpoints\x18\x12 \x03(\x0b\x32\x14.google.api.Endpoint\x12$\n\x07\x63ontrol\x18\x15 \x01(\x0b\x32\x13.google.api.Control\x12'\n\x04logs\x18\x17 \x03(\x0b\x32\x19.google.api.LogDescriptor\x12-\n\x07metrics\x18\x18 \x03(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x44\n\x13monitored_resources\x18\x19 \x03(\x0b\x32'.google.api.MonitoredResourceDescriptor\x12$\n\x07\x62illing\x18\x1a \x01(\x0b\x32\x13.google.api.Billing\x12$\n\x07logging\x18\x1b \x01(\x0b\x32\x13.google.api.Logging\x12*\n\nmonitoring\x18\x1c \x01(\x0b\x32\x16.google.api.Monitoring\x12\x37\n\x11system_parameters\x18\x1d \x01(\x0b\x32\x1c.google.api.SystemParameters\x12+\n\x0bsource_info\x18% \x01(\x0b\x32\x16.google.api.SourceInfo\x12*\n\npublishing\x18- \x01(\x0b\x32\x16.google.api.Publishing\x12\x34\n\x0e\x63onfig_version\x18\x14 \x01(\x0b\x32\x1c.google.protobuf.UInt32ValueBn\n\x0e\x63om.google.apiB\x0cServiceProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3"
+)
+
+
+_SERVICE = DESCRIPTOR.message_types_by_name["Service"]
+Service = _reflection.GeneratedProtocolMessageType(
+ "Service",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _SERVICE,
+ "__module__": "google.api.service_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Service)
+ },
+)
+_sym_db.RegisterMessage(Service)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\014ServiceProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\242\002\004GAPI"
+ _SERVICE._serialized_start = 614
+ _SERVICE._serialized_end = 1640
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/source_info.proto b/Lib/site-packages/google/api/source_info.proto
new file mode 100644
index 0000000..51fe279
--- /dev/null
+++ b/Lib/site-packages/google/api/source_info.proto
@@ -0,0 +1,31 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/protobuf/any.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig";
+option java_multiple_files = true;
+option java_outer_classname = "SourceInfoProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// Source information used to create a Service Config
+message SourceInfo {
+ // All files used during config generation.
+ repeated google.protobuf.Any source_files = 1;
+}
diff --git a/Lib/site-packages/google/api/source_info_pb2.py b/Lib/site-packages/google/api/source_info_pb2.py
new file mode 100644
index 0000000..ee33931
--- /dev/null
+++ b/Lib/site-packages/google/api/source_info_pb2.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/source_info.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x1cgoogle/api/source_info.proto\x12\ngoogle.api\x1a\x19google/protobuf/any.proto"8\n\nSourceInfo\x12*\n\x0csource_files\x18\x01 \x03(\x0b\x32\x14.google.protobuf.AnyBq\n\x0e\x63om.google.apiB\x0fSourceInfoProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_SOURCEINFO = DESCRIPTOR.message_types_by_name["SourceInfo"]
+SourceInfo = _reflection.GeneratedProtocolMessageType(
+ "SourceInfo",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _SOURCEINFO,
+ "__module__": "google.api.source_info_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.SourceInfo)
+ },
+)
+_sym_db.RegisterMessage(SourceInfo)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\017SourceInfoProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\242\002\004GAPI"
+ _SOURCEINFO._serialized_start = 71
+ _SOURCEINFO._serialized_end = 127
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/system_parameter.proto b/Lib/site-packages/google/api/system_parameter.proto
new file mode 100644
index 0000000..8d29057
--- /dev/null
+++ b/Lib/site-packages/google/api/system_parameter.proto
@@ -0,0 +1,96 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig";
+option java_multiple_files = true;
+option java_outer_classname = "SystemParameterProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// ### System parameter configuration
+//
+// A system parameter is a special kind of parameter defined by the API
+// system, not by an individual API. It is typically mapped to an HTTP header
+// and/or a URL query parameter. This configuration specifies which methods
+// change the names of the system parameters.
+message SystemParameters {
+ // Define system parameters.
+ //
+ // The parameters defined here will override the default parameters
+ // implemented by the system. If this field is missing from the service
+ // config, default system parameters will be used. Default system parameters
+ // and names is implementation-dependent.
+ //
+ // Example: define api key for all methods
+ //
+ // system_parameters
+ // rules:
+ // - selector: "*"
+ // parameters:
+ // - name: api_key
+ // url_query_parameter: api_key
+ //
+ //
+ // Example: define 2 api key names for a specific method.
+ //
+ // system_parameters
+ // rules:
+ // - selector: "/ListShelves"
+ // parameters:
+ // - name: api_key
+ // http_header: Api-Key1
+ // - name: api_key
+ // http_header: Api-Key2
+ //
+ // **NOTE:** All service configuration rules follow "last one wins" order.
+ repeated SystemParameterRule rules = 1;
+}
+
+// Define a system parameter rule mapping system parameter definitions to
+// methods.
+message SystemParameterRule {
+ // Selects the methods to which this rule applies. Use '*' to indicate all
+ // methods in all APIs.
+ //
+ // Refer to [selector][google.api.DocumentationRule.selector] for syntax
+ // details.
+ string selector = 1;
+
+ // Define parameters. Multiple names may be defined for a parameter.
+ // For a given method call, only one of them should be used. If multiple
+ // names are used the behavior is implementation-dependent.
+ // If none of the specified names are present the behavior is
+ // parameter-dependent.
+ repeated SystemParameter parameters = 2;
+}
+
+// Define a parameter's name and location. The parameter may be passed as either
+// an HTTP header or a URL query parameter, and if both are passed the behavior
+// is implementation-dependent.
+message SystemParameter {
+ // Define the name of the parameter, such as "api_key" . It is case sensitive.
+ string name = 1;
+
+ // Define the HTTP header name to use for the parameter. It is case
+ // insensitive.
+ string http_header = 2;
+
+ // Define the URL query parameter name to use for the parameter. It is case
+ // sensitive.
+ string url_query_parameter = 3;
+}
diff --git a/Lib/site-packages/google/api/system_parameter_pb2.py b/Lib/site-packages/google/api/system_parameter_pb2.py
new file mode 100644
index 0000000..fcea08e
--- /dev/null
+++ b/Lib/site-packages/google/api/system_parameter_pb2.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/system_parameter.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n!google/api/system_parameter.proto\x12\ngoogle.api"B\n\x10SystemParameters\x12.\n\x05rules\x18\x01 \x03(\x0b\x32\x1f.google.api.SystemParameterRule"X\n\x13SystemParameterRule\x12\x10\n\x08selector\x18\x01 \x01(\t\x12/\n\nparameters\x18\x02 \x03(\x0b\x32\x1b.google.api.SystemParameter"Q\n\x0fSystemParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0bhttp_header\x18\x02 \x01(\t\x12\x1b\n\x13url_query_parameter\x18\x03 \x01(\tBv\n\x0e\x63om.google.apiB\x14SystemParameterProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_SYSTEMPARAMETERS = DESCRIPTOR.message_types_by_name["SystemParameters"]
+_SYSTEMPARAMETERRULE = DESCRIPTOR.message_types_by_name["SystemParameterRule"]
+_SYSTEMPARAMETER = DESCRIPTOR.message_types_by_name["SystemParameter"]
+SystemParameters = _reflection.GeneratedProtocolMessageType(
+ "SystemParameters",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _SYSTEMPARAMETERS,
+ "__module__": "google.api.system_parameter_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.SystemParameters)
+ },
+)
+_sym_db.RegisterMessage(SystemParameters)
+
+SystemParameterRule = _reflection.GeneratedProtocolMessageType(
+ "SystemParameterRule",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _SYSTEMPARAMETERRULE,
+ "__module__": "google.api.system_parameter_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.SystemParameterRule)
+ },
+)
+_sym_db.RegisterMessage(SystemParameterRule)
+
+SystemParameter = _reflection.GeneratedProtocolMessageType(
+ "SystemParameter",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _SYSTEMPARAMETER,
+ "__module__": "google.api.system_parameter_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.SystemParameter)
+ },
+)
+_sym_db.RegisterMessage(SystemParameter)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\024SystemParameterProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\242\002\004GAPI"
+ _SYSTEMPARAMETERS._serialized_start = 49
+ _SYSTEMPARAMETERS._serialized_end = 115
+ _SYSTEMPARAMETERRULE._serialized_start = 117
+ _SYSTEMPARAMETERRULE._serialized_end = 205
+ _SYSTEMPARAMETER._serialized_start = 207
+ _SYSTEMPARAMETER._serialized_end = 288
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/usage.proto b/Lib/site-packages/google/api/usage.proto
new file mode 100644
index 0000000..b9384b4
--- /dev/null
+++ b/Lib/site-packages/google/api/usage.proto
@@ -0,0 +1,96 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig";
+option java_multiple_files = true;
+option java_outer_classname = "UsageProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+// Configuration controlling usage of a service.
+message Usage {
+ // Requirements that must be satisfied before a consumer project can use the
+ // service. Each requirement is of the form /;
+ // for example 'serviceusage.googleapis.com/billing-enabled'.
+ //
+ // For Google APIs, a Terms of Service requirement must be included here.
+ // Google Cloud APIs must include "serviceusage.googleapis.com/tos/cloud".
+ // Other Google APIs should include
+ // "serviceusage.googleapis.com/tos/universal". Additional ToS can be
+ // included based on the business needs.
+ repeated string requirements = 1;
+
+ // A list of usage rules that apply to individual API methods.
+ //
+ // **NOTE:** All service configuration rules follow "last one wins" order.
+ repeated UsageRule rules = 6;
+
+ // The full resource name of a channel used for sending notifications to the
+ // service producer.
+ //
+ // Google Service Management currently only supports
+ // [Google Cloud Pub/Sub](https://cloud.google.com/pubsub) as a notification
+ // channel. To use Google Cloud Pub/Sub as the channel, this must be the name
+ // of a Cloud Pub/Sub topic that uses the Cloud Pub/Sub topic name format
+ // documented in https://cloud.google.com/pubsub/docs/overview.
+ string producer_notification_channel = 7;
+}
+
+// Usage configuration rules for the service.
+//
+// NOTE: Under development.
+//
+//
+// Use this rule to configure unregistered calls for the service. Unregistered
+// calls are calls that do not contain consumer project identity.
+// (Example: calls that do not contain an API key).
+// By default, API methods do not allow unregistered calls, and each method call
+// must be identified by a consumer project identity. Use this rule to
+// allow/disallow unregistered calls.
+//
+// Example of an API that wants to allow unregistered calls for entire service.
+//
+// usage:
+// rules:
+// - selector: "*"
+// allow_unregistered_calls: true
+//
+// Example of a method that wants to allow unregistered calls.
+//
+// usage:
+// rules:
+// - selector: "google.example.library.v1.LibraryService.CreateBook"
+// allow_unregistered_calls: true
+message UsageRule {
+ // Selects the methods to which this rule applies. Use '*' to indicate all
+ // methods in all APIs.
+ //
+ // Refer to [selector][google.api.DocumentationRule.selector] for syntax
+ // details.
+ string selector = 1;
+
+ // If true, the selected method allows unregistered calls, e.g. calls
+ // that don't identify any user or application.
+ bool allow_unregistered_calls = 2;
+
+ // If true, the selected method should skip service control and the control
+ // plane features, such as quota and billing, will not be available.
+ // This flag is used by Google Cloud Endpoints to bypass checks for internal
+ // methods, such as service health check methods.
+ bool skip_service_control = 3;
+}
diff --git a/Lib/site-packages/google/api/usage_pb2.py b/Lib/site-packages/google/api/usage_pb2.py
new file mode 100644
index 0000000..051741f
--- /dev/null
+++ b/Lib/site-packages/google/api/usage_pb2.py
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/usage.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x16google/api/usage.proto\x12\ngoogle.api"j\n\x05Usage\x12\x14\n\x0crequirements\x18\x01 \x03(\t\x12$\n\x05rules\x18\x06 \x03(\x0b\x32\x15.google.api.UsageRule\x12%\n\x1dproducer_notification_channel\x18\x07 \x01(\t"]\n\tUsageRule\x12\x10\n\x08selector\x18\x01 \x01(\t\x12 \n\x18\x61llow_unregistered_calls\x18\x02 \x01(\x08\x12\x1c\n\x14skip_service_control\x18\x03 \x01(\x08\x42l\n\x0e\x63om.google.apiB\nUsageProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+_USAGE = DESCRIPTOR.message_types_by_name["Usage"]
+_USAGERULE = DESCRIPTOR.message_types_by_name["UsageRule"]
+Usage = _reflection.GeneratedProtocolMessageType(
+ "Usage",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _USAGE,
+ "__module__": "google.api.usage_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Usage)
+ },
+)
+_sym_db.RegisterMessage(Usage)
+
+UsageRule = _reflection.GeneratedProtocolMessageType(
+ "UsageRule",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _USAGERULE,
+ "__module__": "google.api.usage_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.UsageRule)
+ },
+)
+_sym_db.RegisterMessage(UsageRule)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\nUsageProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\242\002\004GAPI"
+ _USAGE._serialized_start = 38
+ _USAGE._serialized_end = 144
+ _USAGERULE._serialized_start = 146
+ _USAGERULE._serialized_end = 239
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api/visibility.proto b/Lib/site-packages/google/api/visibility.proto
new file mode 100644
index 0000000..8b1f946
--- /dev/null
+++ b/Lib/site-packages/google/api/visibility.proto
@@ -0,0 +1,113 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/protobuf/descriptor.proto";
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/api/visibility;visibility";
+option java_multiple_files = true;
+option java_outer_classname = "VisibilityProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+extend google.protobuf.EnumOptions {
+ // See `VisibilityRule`.
+ google.api.VisibilityRule enum_visibility = 72295727;
+}
+
+extend google.protobuf.EnumValueOptions {
+ // See `VisibilityRule`.
+ google.api.VisibilityRule value_visibility = 72295727;
+}
+
+extend google.protobuf.FieldOptions {
+ // See `VisibilityRule`.
+ google.api.VisibilityRule field_visibility = 72295727;
+}
+
+extend google.protobuf.MessageOptions {
+ // See `VisibilityRule`.
+ google.api.VisibilityRule message_visibility = 72295727;
+}
+
+extend google.protobuf.MethodOptions {
+ // See `VisibilityRule`.
+ google.api.VisibilityRule method_visibility = 72295727;
+}
+
+extend google.protobuf.ServiceOptions {
+ // See `VisibilityRule`.
+ google.api.VisibilityRule api_visibility = 72295727;
+}
+
+// `Visibility` restricts service consumer's access to service elements,
+// such as whether an application can call a visibility-restricted method.
+// The restriction is expressed by applying visibility labels on service
+// elements. The visibility labels are elsewhere linked to service consumers.
+//
+// A service can define multiple visibility labels, but a service consumer
+// should be granted at most one visibility label. Multiple visibility
+// labels for a single service consumer are not supported.
+//
+// If an element and all its parents have no visibility label, its visibility
+// is unconditionally granted.
+//
+// Example:
+//
+// visibility:
+// rules:
+// - selector: google.calendar.Calendar.EnhancedSearch
+// restriction: PREVIEW
+// - selector: google.calendar.Calendar.Delegate
+// restriction: INTERNAL
+//
+// Here, all methods are publicly visible except for the restricted methods
+// EnhancedSearch and Delegate.
+message Visibility {
+ // A list of visibility rules that apply to individual API elements.
+ //
+ // **NOTE:** All service configuration rules follow "last one wins" order.
+ repeated VisibilityRule rules = 1;
+}
+
+// A visibility rule provides visibility configuration for an individual API
+// element.
+message VisibilityRule {
+ // Selects methods, messages, fields, enums, etc. to which this rule applies.
+ //
+ // Refer to [selector][google.api.DocumentationRule.selector] for syntax
+ // details.
+ string selector = 1;
+
+ // A comma-separated list of visibility labels that apply to the `selector`.
+ // Any of the listed labels can be used to grant the visibility.
+ //
+ // If a rule has multiple labels, removing one of the labels but not all of
+ // them can break clients.
+ //
+ // Example:
+ //
+ // visibility:
+ // rules:
+ // - selector: google.calendar.Calendar.EnhancedSearch
+ // restriction: INTERNAL, PREVIEW
+ //
+ // Removing INTERNAL from this restriction will break clients that rely on
+ // this method and only had access to it through INTERNAL.
+ string restriction = 2;
+}
diff --git a/Lib/site-packages/google/api/visibility_pb2.py b/Lib/site-packages/google/api/visibility_pb2.py
new file mode 100644
index 0000000..c0db839
--- /dev/null
+++ b/Lib/site-packages/google/api/visibility_pb2.py
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/api/visibility.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x1bgoogle/api/visibility.proto\x12\ngoogle.api\x1a google/protobuf/descriptor.proto"7\n\nVisibility\x12)\n\x05rules\x18\x01 \x03(\x0b\x32\x1a.google.api.VisibilityRule"7\n\x0eVisibilityRule\x12\x10\n\x08selector\x18\x01 \x01(\t\x12\x13\n\x0brestriction\x18\x02 \x01(\t:T\n\x0f\x65num_visibility\x12\x1c.google.protobuf.EnumOptions\x18\xaf\xca\xbc" \x01(\x0b\x32\x1a.google.api.VisibilityRule:Z\n\x10value_visibility\x12!.google.protobuf.EnumValueOptions\x18\xaf\xca\xbc" \x01(\x0b\x32\x1a.google.api.VisibilityRule:V\n\x10\x66ield_visibility\x12\x1d.google.protobuf.FieldOptions\x18\xaf\xca\xbc" \x01(\x0b\x32\x1a.google.api.VisibilityRule:Z\n\x12message_visibility\x12\x1f.google.protobuf.MessageOptions\x18\xaf\xca\xbc" \x01(\x0b\x32\x1a.google.api.VisibilityRule:X\n\x11method_visibility\x12\x1e.google.protobuf.MethodOptions\x18\xaf\xca\xbc" \x01(\x0b\x32\x1a.google.api.VisibilityRule:V\n\x0e\x61pi_visibility\x12\x1f.google.protobuf.ServiceOptions\x18\xaf\xca\xbc" \x01(\x0b\x32\x1a.google.api.VisibilityRuleBn\n\x0e\x63om.google.apiB\x0fVisibilityProtoP\x01Z?google.golang.org/genproto/googleapis/api/visibility;visibility\xf8\x01\x01\xa2\x02\x04GAPIb\x06proto3'
+)
+
+
+ENUM_VISIBILITY_FIELD_NUMBER = 72295727
+enum_visibility = DESCRIPTOR.extensions_by_name["enum_visibility"]
+VALUE_VISIBILITY_FIELD_NUMBER = 72295727
+value_visibility = DESCRIPTOR.extensions_by_name["value_visibility"]
+FIELD_VISIBILITY_FIELD_NUMBER = 72295727
+field_visibility = DESCRIPTOR.extensions_by_name["field_visibility"]
+MESSAGE_VISIBILITY_FIELD_NUMBER = 72295727
+message_visibility = DESCRIPTOR.extensions_by_name["message_visibility"]
+METHOD_VISIBILITY_FIELD_NUMBER = 72295727
+method_visibility = DESCRIPTOR.extensions_by_name["method_visibility"]
+API_VISIBILITY_FIELD_NUMBER = 72295727
+api_visibility = DESCRIPTOR.extensions_by_name["api_visibility"]
+
+_VISIBILITY = DESCRIPTOR.message_types_by_name["Visibility"]
+_VISIBILITYRULE = DESCRIPTOR.message_types_by_name["VisibilityRule"]
+Visibility = _reflection.GeneratedProtocolMessageType(
+ "Visibility",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _VISIBILITY,
+ "__module__": "google.api.visibility_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Visibility)
+ },
+)
+_sym_db.RegisterMessage(Visibility)
+
+VisibilityRule = _reflection.GeneratedProtocolMessageType(
+ "VisibilityRule",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _VISIBILITYRULE,
+ "__module__": "google.api.visibility_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.VisibilityRule)
+ },
+)
+_sym_db.RegisterMessage(VisibilityRule)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+ google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(
+ enum_visibility
+ )
+ google_dot_protobuf_dot_descriptor__pb2.EnumValueOptions.RegisterExtension(
+ value_visibility
+ )
+ google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(
+ field_visibility
+ )
+ google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(
+ message_visibility
+ )
+ google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(
+ method_visibility
+ )
+ google_dot_protobuf_dot_descriptor__pb2.ServiceOptions.RegisterExtension(
+ api_visibility
+ )
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\017VisibilityProtoP\001Z?google.golang.org/genproto/googleapis/api/visibility;visibility\370\001\001\242\002\004GAPI"
+ _VISIBILITY._serialized_start = 77
+ _VISIBILITY._serialized_end = 132
+ _VISIBILITYRULE._serialized_start = 134
+ _VISIBILITYRULE._serialized_end = 189
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/api_core/__init__.py b/Lib/site-packages/google/api_core/__init__.py
new file mode 100644
index 0000000..b80ea37
--- /dev/null
+++ b/Lib/site-packages/google/api_core/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google API Core.
+
+This package contains common code and utilities used by Google client libraries.
+"""
+
+from google.api_core import version as api_core_version
+
+__version__ = api_core_version.__version__
diff --git a/Lib/site-packages/google/api_core/bidi.py b/Lib/site-packages/google/api_core/bidi.py
new file mode 100644
index 0000000..78d98b9
--- /dev/null
+++ b/Lib/site-packages/google/api_core/bidi.py
@@ -0,0 +1,743 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Bi-directional streaming RPC helpers."""
+
+import collections
+import datetime
+import logging
+import queue as queue_module
+import threading
+import time
+
+from google.api_core import exceptions
+
+_LOGGER = logging.getLogger(__name__)
+_BIDIRECTIONAL_CONSUMER_NAME = "Thread-ConsumeBidirectionalStream"
+
+
+class _RequestQueueGenerator(object):
+ """A helper for sending requests to a gRPC stream from a Queue.
+
+ This generator takes requests off a given queue and yields them to gRPC.
+
+ This helper is useful when you have an indeterminate, indefinite, or
+ otherwise open-ended set of requests to send through a request-streaming
+ (or bidirectional) RPC.
+
+ The reason this is necessary is because gRPC takes an iterator as the
+ request for request-streaming RPCs. gRPC consumes this iterator in another
+ thread to allow it to block while generating requests for the stream.
+ However, if the generator blocks indefinitely gRPC will not be able to
+ clean up the thread as it'll be blocked on `next(iterator)` and not be able
+ to check the channel status to stop iterating. This helper mitigates that
+ by waiting on the queue with a timeout and checking the RPC state before
+ yielding.
+
+ Finally, it allows for retrying without swapping queues because if it does
+ pull an item off the queue when the RPC is inactive, it'll immediately put
+ it back and then exit. This is necessary because yielding the item in this
+ case will cause gRPC to discard it. In practice, this means that the order
+ of messages is not guaranteed. If such a thing is necessary it would be
+ easy to use a priority queue.
+
+ Example::
+
+ requests = request_queue_generator(q)
+ call = stub.StreamingRequest(iter(requests))
+ requests.call = call
+
+ for response in call:
+ print(response)
+ q.put(...)
+
+ Note that it is possible to accomplish this behavior without "spinning"
+ (using a queue timeout). One possible way would be to use more threads to
+ multiplex the grpc end event with the queue, another possible way is to
+ use selectors and a custom event/queue object. Both of these approaches
+ are significant from an engineering perspective for small benefit - the
+ CPU consumed by spinning is pretty minuscule.
+
+ Args:
+ queue (queue_module.Queue): The request queue.
+ period (float): The number of seconds to wait for items from the queue
+ before checking if the RPC is cancelled. In practice, this
+ determines the maximum amount of time the request consumption
+ thread will live after the RPC is cancelled.
+ initial_request (Union[protobuf.Message,
+ Callable[None, protobuf.Message]]): The initial request to
+ yield. This is done independently of the request queue to allow fo
+ easily restarting streams that require some initial configuration
+ request.
+ """
+
+ def __init__(self, queue, period=1, initial_request=None):
+ self._queue = queue
+ self._period = period
+ self._initial_request = initial_request
+ self.call = None
+
+ def _is_active(self):
+ # Note: there is a possibility that this starts *before* the call
+ # property is set. So we have to check if self.call is set before
+ # seeing if it's active. We need to return True if self.call is None.
+ # See https://github.com/googleapis/python-api-core/issues/560.
+ return self.call is None or self.call.is_active()
+
+ def __iter__(self):
+ if self._initial_request is not None:
+ if callable(self._initial_request):
+ yield self._initial_request()
+ else:
+ yield self._initial_request
+
+ while True:
+ try:
+ item = self._queue.get(timeout=self._period)
+ except queue_module.Empty:
+ if not self._is_active():
+ _LOGGER.debug(
+ "Empty queue and inactive call, exiting request " "generator."
+ )
+ return
+ else:
+ # call is still active, keep waiting for queue items.
+ continue
+
+ # The consumer explicitly sent "None", indicating that the request
+ # should end.
+ if item is None:
+ _LOGGER.debug("Cleanly exiting request generator.")
+ return
+
+ if not self._is_active():
+ # We have an item, but the call is closed. We should put the
+ # item back on the queue so that the next call can consume it.
+ self._queue.put(item)
+ _LOGGER.debug(
+ "Inactive call, replacing item on queue and exiting "
+ "request generator."
+ )
+ return
+
+ yield item
+
+
+class _Throttle(object):
+ """A context manager limiting the total entries in a sliding time window.
+
+ If more than ``access_limit`` attempts are made to enter the context manager
+ instance in the last ``time window`` interval, the exceeding requests block
+ until enough time elapses.
+
+ The context manager instances are thread-safe and can be shared between
+ multiple threads. If multiple requests are blocked and waiting to enter,
+ the exact order in which they are allowed to proceed is not determined.
+
+ Example::
+
+ max_three_per_second = _Throttle(
+ access_limit=3, time_window=datetime.timedelta(seconds=1)
+ )
+
+ for i in range(5):
+ with max_three_per_second as time_waited:
+ print("{}: Waited {} seconds to enter".format(i, time_waited))
+
+ Args:
+ access_limit (int): the maximum number of entries allowed in the time window
+ time_window (datetime.timedelta): the width of the sliding time window
+ """
+
+ def __init__(self, access_limit, time_window):
+ if access_limit < 1:
+ raise ValueError("access_limit argument must be positive")
+
+ if time_window <= datetime.timedelta(0):
+ raise ValueError("time_window argument must be a positive timedelta")
+
+ self._time_window = time_window
+ self._access_limit = access_limit
+ self._past_entries = collections.deque(
+ maxlen=access_limit
+ ) # least recent first
+ self._entry_lock = threading.Lock()
+
+ def __enter__(self):
+ with self._entry_lock:
+ cutoff_time = datetime.datetime.now() - self._time_window
+
+ # drop the entries that are too old, as they are no longer relevant
+ while self._past_entries and self._past_entries[0] < cutoff_time:
+ self._past_entries.popleft()
+
+ if len(self._past_entries) < self._access_limit:
+ self._past_entries.append(datetime.datetime.now())
+ return 0.0 # no waiting was needed
+
+ to_wait = (self._past_entries[0] - cutoff_time).total_seconds()
+ time.sleep(to_wait)
+
+ self._past_entries.append(datetime.datetime.now())
+ return to_wait
+
+ def __exit__(self, *_):
+ pass
+
+ def __repr__(self):
+ return "{}(access_limit={}, time_window={})".format(
+ self.__class__.__name__, self._access_limit, repr(self._time_window)
+ )
+
+
+class BidiRpc(object):
+ """A helper for consuming a bi-directional streaming RPC.
+
+ This maps gRPC's built-in interface which uses a request iterator and a
+ response iterator into a socket-like :func:`send` and :func:`recv`. This
+ is a more useful pattern for long-running or asymmetric streams (streams
+ where there is not a direct correlation between the requests and
+ responses).
+
+ Example::
+
+ initial_request = example_pb2.StreamingRpcRequest(
+ setting='example')
+ rpc = BidiRpc(
+ stub.StreamingRpc,
+ initial_request=initial_request,
+ metadata=[('name', 'value')]
+ )
+
+ rpc.open()
+
+ while rpc.is_active():
+ print(rpc.recv())
+ rpc.send(example_pb2.StreamingRpcRequest(
+ data='example'))
+
+ This does *not* retry the stream on errors. See :class:`ResumableBidiRpc`.
+
+ Args:
+ start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to
+ start the RPC.
+ initial_request (Union[protobuf.Message,
+ Callable[None, protobuf.Message]]): The initial request to
+ yield. This is useful if an initial request is needed to start the
+ stream.
+ metadata (Sequence[Tuple(str, str)]): RPC metadata to include in
+ the request.
+ """
+
+ def __init__(self, start_rpc, initial_request=None, metadata=None):
+ self._start_rpc = start_rpc
+ self._initial_request = initial_request
+ self._rpc_metadata = metadata
+ self._request_queue = queue_module.Queue()
+ self._request_generator = None
+ self._is_active = False
+ self._callbacks = []
+ self.call = None
+
+ def add_done_callback(self, callback):
+ """Adds a callback that will be called when the RPC terminates.
+
+ This occurs when the RPC errors or is successfully terminated.
+
+ Args:
+ callback (Callable[[grpc.Future], None]): The callback to execute.
+ It will be provided with the same gRPC future as the underlying
+ stream which will also be a :class:`grpc.Call`.
+ """
+ self._callbacks.append(callback)
+
+ def _on_call_done(self, future):
+ # This occurs when the RPC errors or is successfully terminated.
+ # Note that grpc's "future" here can also be a grpc.RpcError.
+ # See note in https://github.com/grpc/grpc/issues/10885#issuecomment-302651331
+ # that `grpc.RpcError` is also `grpc.call`.
+ for callback in self._callbacks:
+ callback(future)
+
+ def open(self):
+ """Opens the stream."""
+ if self.is_active:
+ raise ValueError("Can not open an already open stream.")
+
+ request_generator = _RequestQueueGenerator(
+ self._request_queue, initial_request=self._initial_request
+ )
+ try:
+ call = self._start_rpc(iter(request_generator), metadata=self._rpc_metadata)
+ except exceptions.GoogleAPICallError as exc:
+ # The original `grpc.RpcError` (which is usually also a `grpc.Call`) is
+ # available from the ``response`` property on the mapped exception.
+ self._on_call_done(exc.response)
+ raise
+
+ request_generator.call = call
+
+ # TODO: api_core should expose the future interface for wrapped
+ # callables as well.
+ if hasattr(call, "_wrapped"): # pragma: NO COVER
+ call._wrapped.add_done_callback(self._on_call_done)
+ else:
+ call.add_done_callback(self._on_call_done)
+
+ self._request_generator = request_generator
+ self.call = call
+
+ def close(self):
+ """Closes the stream."""
+ if self.call is None:
+ return
+
+ self._request_queue.put(None)
+ self.call.cancel()
+ self._request_generator = None
+ # Don't set self.call to None. Keep it around so that send/recv can
+ # raise the error.
+
+ def send(self, request):
+ """Queue a message to be sent on the stream.
+
+ Send is non-blocking.
+
+ If the underlying RPC has been closed, this will raise.
+
+ Args:
+ request (protobuf.Message): The request to send.
+ """
+ if self.call is None:
+ raise ValueError("Can not send() on an RPC that has never been open()ed.")
+
+ # Don't use self.is_active(), as ResumableBidiRpc will overload it
+ # to mean something semantically different.
+ if self.call.is_active():
+ self._request_queue.put(request)
+ else:
+ # calling next should cause the call to raise.
+ next(self.call)
+
+ def recv(self):
+ """Wait for a message to be returned from the stream.
+
+ Recv is blocking.
+
+ If the underlying RPC has been closed, this will raise.
+
+ Returns:
+ protobuf.Message: The received message.
+ """
+ if self.call is None:
+ raise ValueError("Can not recv() on an RPC that has never been open()ed.")
+
+ return next(self.call)
+
+ @property
+ def is_active(self):
+ """bool: True if this stream is currently open and active."""
+ return self.call is not None and self.call.is_active()
+
+ @property
+ def pending_requests(self):
+ """int: Returns an estimate of the number of queued requests."""
+ return self._request_queue.qsize()
+
+
+def _never_terminate(future_or_error):
+ """By default, no errors cause BiDi termination."""
+ return False
+
+
+class ResumableBidiRpc(BidiRpc):
+ """A :class:`BidiRpc` that can automatically resume the stream on errors.
+
+ It uses the ``should_recover`` arg to determine if it should re-establish
+ the stream on error.
+
+ Example::
+
+ def should_recover(exc):
+ return (
+ isinstance(exc, grpc.RpcError) and
+ exc.code() == grpc.StatusCode.UNAVAILABLE)
+
+ initial_request = example_pb2.StreamingRpcRequest(
+ setting='example')
+
+ metadata = [('header_name', 'value')]
+
+ rpc = ResumableBidiRpc(
+ stub.StreamingRpc,
+ should_recover=should_recover,
+ initial_request=initial_request,
+ metadata=metadata
+ )
+
+ rpc.open()
+
+ while rpc.is_active():
+ print(rpc.recv())
+ rpc.send(example_pb2.StreamingRpcRequest(
+ data='example'))
+
+ Args:
+ start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to
+ start the RPC.
+ initial_request (Union[protobuf.Message,
+ Callable[None, protobuf.Message]]): The initial request to
+ yield. This is useful if an initial request is needed to start the
+ stream.
+ should_recover (Callable[[Exception], bool]): A function that returns
+ True if the stream should be recovered. This will be called
+ whenever an error is encountered on the stream.
+ should_terminate (Callable[[Exception], bool]): A function that returns
+ True if the stream should be terminated. This will be called
+ whenever an error is encountered on the stream.
+ metadata Sequence[Tuple(str, str)]: RPC metadata to include in
+ the request.
+ throttle_reopen (bool): If ``True``, throttling will be applied to
+ stream reopen calls. Defaults to ``False``.
+ """
+
+ def __init__(
+ self,
+ start_rpc,
+ should_recover,
+ should_terminate=_never_terminate,
+ initial_request=None,
+ metadata=None,
+ throttle_reopen=False,
+ ):
+ super(ResumableBidiRpc, self).__init__(start_rpc, initial_request, metadata)
+ self._should_recover = should_recover
+ self._should_terminate = should_terminate
+ self._operational_lock = threading.RLock()
+ self._finalized = False
+ self._finalize_lock = threading.Lock()
+
+ if throttle_reopen:
+ self._reopen_throttle = _Throttle(
+ access_limit=5, time_window=datetime.timedelta(seconds=10)
+ )
+ else:
+ self._reopen_throttle = None
+
+ def _finalize(self, result):
+ with self._finalize_lock:
+ if self._finalized:
+ return
+
+ for callback in self._callbacks:
+ callback(result)
+
+ self._finalized = True
+
+ def _on_call_done(self, future):
+ # Unlike the base class, we only execute the callbacks on a terminal
+ # error, not for errors that we can recover from. Note that grpc's
+ # "future" here is also a grpc.RpcError.
+ with self._operational_lock:
+ if self._should_terminate(future):
+ self._finalize(future)
+ elif not self._should_recover(future):
+ self._finalize(future)
+ else:
+ _LOGGER.debug("Re-opening stream from gRPC callback.")
+ self._reopen()
+
+ def _reopen(self):
+ with self._operational_lock:
+ # Another thread already managed to re-open this stream.
+ if self.call is not None and self.call.is_active():
+ _LOGGER.debug("Stream was already re-established.")
+ return
+
+ self.call = None
+ # Request generator should exit cleanly since the RPC its bound to
+ # has exited.
+ self._request_generator = None
+
+ # Note: we do not currently do any sort of backoff here. The
+ # assumption is that re-establishing the stream under normal
+ # circumstances will happen in intervals greater than 60s.
+ # However, it is possible in a degenerative case that the server
+ # closes the stream rapidly which would lead to thrashing here,
+ # but hopefully in those cases the server would return a non-
+ # retryable error.
+
+ try:
+ if self._reopen_throttle:
+ with self._reopen_throttle:
+ self.open()
+ else:
+ self.open()
+ # If re-opening or re-calling the method fails for any reason,
+ # consider it a terminal error and finalize the stream.
+ except Exception as exc:
+ _LOGGER.debug("Failed to re-open stream due to %s", exc)
+ self._finalize(exc)
+ raise
+
+ _LOGGER.info("Re-established stream")
+
+ def _recoverable(self, method, *args, **kwargs):
+ """Wraps a method to recover the stream and retry on error.
+
+ If a retryable error occurs while making the call, then the stream will
+ be re-opened and the method will be retried. This happens indefinitely
+ so long as the error is a retryable one. If an error occurs while
+ re-opening the stream, then this method will raise immediately and
+ trigger finalization of this object.
+
+ Args:
+ method (Callable[..., Any]): The method to call.
+ args: The args to pass to the method.
+ kwargs: The kwargs to pass to the method.
+ """
+ while True:
+ try:
+ return method(*args, **kwargs)
+
+ except Exception as exc:
+ with self._operational_lock:
+ _LOGGER.debug("Call to retryable %r caused %s.", method, exc)
+
+ if self._should_terminate(exc):
+ self.close()
+ _LOGGER.debug("Terminating %r due to %s.", method, exc)
+ self._finalize(exc)
+ break
+
+ if not self._should_recover(exc):
+ self.close()
+ _LOGGER.debug("Not retrying %r due to %s.", method, exc)
+ self._finalize(exc)
+ raise exc
+
+ _LOGGER.debug("Re-opening stream from retryable %r.", method)
+ self._reopen()
+
+ def _send(self, request):
+ # Grab a reference to the RPC call. Because another thread (notably
+ # the gRPC error thread) can modify self.call (by invoking reopen),
+ # we should ensure our reference can not change underneath us.
+ # If self.call is modified (such as replaced with a new RPC call) then
+ # this will use the "old" RPC, which should result in the same
+ # exception passed into gRPC's error handler being raised here, which
+ # will be handled by the usual error handling in retryable.
+ with self._operational_lock:
+ call = self.call
+
+ if call is None:
+ raise ValueError("Can not send() on an RPC that has never been open()ed.")
+
+ # Don't use self.is_active(), as ResumableBidiRpc will overload it
+ # to mean something semantically different.
+ if call.is_active():
+ self._request_queue.put(request)
+ pass
+ else:
+ # calling next should cause the call to raise.
+ next(call)
+
+ def send(self, request):
+ return self._recoverable(self._send, request)
+
+ def _recv(self):
+ with self._operational_lock:
+ call = self.call
+
+ if call is None:
+ raise ValueError("Can not recv() on an RPC that has never been open()ed.")
+
+ return next(call)
+
+ def recv(self):
+ return self._recoverable(self._recv)
+
+ def close(self):
+ self._finalize(None)
+ super(ResumableBidiRpc, self).close()
+
+ @property
+ def is_active(self):
+ """bool: True if this stream is currently open and active."""
+ # Use the operational lock. It's entirely possible for something
+ # to check the active state *while* the RPC is being retried.
+ # Also, use finalized to track the actual terminal state here.
+ # This is because if the stream is re-established by the gRPC thread
+ # it's technically possible to check this between when gRPC marks the
+ # RPC as inactive and when gRPC executes our callback that re-opens
+ # the stream.
+ with self._operational_lock:
+ return self.call is not None and not self._finalized
+
+
+class BackgroundConsumer(object):
+ """A bi-directional stream consumer that runs in a separate thread.
+
+ This maps the consumption of a stream into a callback-based model. It also
+ provides :func:`pause` and :func:`resume` to allow for flow-control.
+
+ Example::
+
+ def should_recover(exc):
+ return (
+ isinstance(exc, grpc.RpcError) and
+ exc.code() == grpc.StatusCode.UNAVAILABLE)
+
+ initial_request = example_pb2.StreamingRpcRequest(
+ setting='example')
+
+ rpc = ResumeableBidiRpc(
+ stub.StreamingRpc,
+ initial_request=initial_request,
+ should_recover=should_recover)
+
+ def on_response(response):
+ print(response)
+
+ consumer = BackgroundConsumer(rpc, on_response)
+ consumer.start()
+
+ Note that error handling *must* be done by using the provided
+ ``bidi_rpc``'s ``add_done_callback``. This helper will automatically exit
+ whenever the RPC itself exits and will not provide any error details.
+
+ Args:
+ bidi_rpc (BidiRpc): The RPC to consume. Should not have been
+ ``open()``ed yet.
+ on_response (Callable[[protobuf.Message], None]): The callback to
+ be called for every response on the stream.
+ """
+
+ def __init__(self, bidi_rpc, on_response):
+ self._bidi_rpc = bidi_rpc
+ self._on_response = on_response
+ self._paused = False
+ self._wake = threading.Condition()
+ self._thread = None
+ self._operational_lock = threading.Lock()
+
+ def _on_call_done(self, future):
+ # Resume the thread if it's paused, this prevents blocking forever
+ # when the RPC has terminated.
+ self.resume()
+
+ def _thread_main(self, ready):
+ try:
+ ready.set()
+ self._bidi_rpc.add_done_callback(self._on_call_done)
+ self._bidi_rpc.open()
+
+ while self._bidi_rpc.is_active:
+ # Do not allow the paused status to change at all during this
+ # section. There is a condition where we could be resumed
+ # between checking if we are paused and calling wake.wait(),
+ # which means that we will miss the notification to wake up
+ # (oops!) and wait for a notification that will never come.
+ # Keeping the lock throughout avoids that.
+ # In the future, we could use `Condition.wait_for` if we drop
+ # Python 2.7.
+ # See: https://github.com/googleapis/python-api-core/issues/211
+ with self._wake:
+ while self._paused:
+ _LOGGER.debug("paused, waiting for waking.")
+ self._wake.wait()
+ _LOGGER.debug("woken.")
+
+ _LOGGER.debug("waiting for recv.")
+ response = self._bidi_rpc.recv()
+ _LOGGER.debug("recved response.")
+ self._on_response(response)
+
+ except exceptions.GoogleAPICallError as exc:
+ _LOGGER.debug(
+ "%s caught error %s and will exit. Generally this is due to "
+ "the RPC itself being cancelled and the error will be "
+ "surfaced to the calling code.",
+ _BIDIRECTIONAL_CONSUMER_NAME,
+ exc,
+ exc_info=True,
+ )
+
+ except Exception as exc:
+ _LOGGER.exception(
+ "%s caught unexpected exception %s and will exit.",
+ _BIDIRECTIONAL_CONSUMER_NAME,
+ exc,
+ )
+
+ _LOGGER.info("%s exiting", _BIDIRECTIONAL_CONSUMER_NAME)
+
+ def start(self):
+ """Start the background thread and begin consuming the thread."""
+ with self._operational_lock:
+ ready = threading.Event()
+ thread = threading.Thread(
+ name=_BIDIRECTIONAL_CONSUMER_NAME,
+ target=self._thread_main,
+ args=(ready,),
+ )
+ thread.daemon = True
+ thread.start()
+ # Other parts of the code rely on `thread.is_alive` which
+ # isn't sufficient to know if a thread is active, just that it may
+ # soon be active. This can cause races. Further protect
+ # against races by using a ready event and wait on it to be set.
+ ready.wait()
+ self._thread = thread
+ _LOGGER.debug("Started helper thread %s", thread.name)
+
+ def stop(self):
+ """Stop consuming the stream and shutdown the background thread."""
+ with self._operational_lock:
+ self._bidi_rpc.close()
+
+ if self._thread is not None:
+ # Resume the thread to wake it up in case it is sleeping.
+ self.resume()
+ # The daemonized thread may itself block, so don't wait
+ # for it longer than a second.
+ self._thread.join(1.0)
+ if self._thread.is_alive(): # pragma: NO COVER
+ _LOGGER.warning("Background thread did not exit.")
+
+ self._thread = None
+
+ @property
+ def is_active(self):
+ """bool: True if the background thread is active."""
+ return self._thread is not None and self._thread.is_alive()
+
+ def pause(self):
+ """Pauses the response stream.
+
+ This does *not* pause the request stream.
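+
+        A minimal flow-control sketch (illustrative only; ``consumer`` and
+        ``work_queue`` are names assumed to exist in the calling code):
+
+        .. code-block:: python
+
+            def on_response(response):
+                work_queue.put(response)
+                if work_queue.full():
+                    consumer.pause()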
+ """
+ with self._wake:
+ self._paused = True
+
+ def resume(self):
+ """Resumes the response stream."""
+ with self._wake:
+ self._paused = False
+ self._wake.notify_all()
+
+ @property
+ def is_paused(self):
+ """bool: True if the response stream is paused."""
+ return self._paused
diff --git a/Lib/site-packages/google/api_core/client_info.py b/Lib/site-packages/google/api_core/client_info.py
new file mode 100644
index 0000000..4832679
--- /dev/null
+++ b/Lib/site-packages/google/api_core/client_info.py
@@ -0,0 +1,107 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for providing client information.
+
+Client information is used to send information about the calling client,
+such as the library and Python version, to API services.
+"""
+
+import platform
+from typing import Union
+
+from google.api_core import version as api_core_version
+
+_PY_VERSION = platform.python_version()
+_API_CORE_VERSION = api_core_version.__version__
+
+_GRPC_VERSION: Union[str, None]
+
+try:
+ import grpc
+
+ _GRPC_VERSION = grpc.__version__
+except ImportError: # pragma: NO COVER
+ _GRPC_VERSION = None
+
+
+class ClientInfo(object):
+ """Client information used to generate a user-agent for API calls.
+
+ This user-agent information is sent along with API calls to allow the
+ receiving service to do analytics on which versions of Python and Google
+ libraries are being used.
+
+ Args:
+ python_version (str): The Python interpreter version, for example,
+ ``'3.9.6'``.
+ grpc_version (Optional[str]): The gRPC library version.
+ api_core_version (str): The google-api-core library version.
+ gapic_version (Optional[str]): The version of gapic-generated client
+ library, if the library was generated by gapic.
+ client_library_version (Optional[str]): The version of the client
+ library, generally used if the client library was not generated
+ by gapic or if additional functionality was built on top of
+ a gapic client library.
+ user_agent (Optional[str]): Prefix to the user agent header. This is
+ used to supply information such as application name or partner tool.
+ Recommended format: ``application-or-tool-ID/major.minor.version``.
+ rest_version (Optional[str]): The requests library version.
+ """
+
+ def __init__(
+ self,
+ python_version=_PY_VERSION,
+ grpc_version=_GRPC_VERSION,
+ api_core_version=_API_CORE_VERSION,
+ gapic_version=None,
+ client_library_version=None,
+ user_agent=None,
+ rest_version=None,
+ ):
+ self.python_version = python_version
+ self.grpc_version = grpc_version
+ self.api_core_version = api_core_version
+ self.gapic_version = gapic_version
+ self.client_library_version = client_library_version
+ self.user_agent = user_agent
+ self.rest_version = rest_version
+
+ def to_user_agent(self):
+ """Returns the user-agent string for this client info."""
+
+ # Note: the order here is important as the internal metrics system
+ # expects these items to be in specific locations.
+ ua = ""
+
+ if self.user_agent is not None:
+ ua += "{user_agent} "
+
+ ua += "gl-python/{python_version} "
+
+ if self.grpc_version is not None:
+ ua += "grpc/{grpc_version} "
+
+ if self.rest_version is not None:
+ ua += "rest/{rest_version} "
+
+ ua += "gax/{api_core_version} "
+
+ if self.gapic_version is not None:
+ ua += "gapic/{gapic_version} "
+
+ if self.client_library_version is not None:
+ ua += "gccl/{client_library_version} "
+
+ return ua.format(**self.__dict__).strip()
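+
+
+# A quick sketch of the resulting header; the version numbers below are
+# illustrative, not pinned:
+#
+#   info = ClientInfo(user_agent="my-app/1.0", gapic_version="2.3.4")
+#   info.to_user_agent()
+#   # -> "my-app/1.0 gl-python/3.9.6 grpc/1.51.0 gax/2.16.0 gapic/2.3.4"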
diff --git a/Lib/site-packages/google/api_core/client_options.py b/Lib/site-packages/google/api_core/client_options.py
new file mode 100644
index 0000000..e93f958
--- /dev/null
+++ b/Lib/site-packages/google/api_core/client_options.py
@@ -0,0 +1,137 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client options class.
+
+Client options provide a consistent interface for user options to be defined
+across clients.
+
+You can pass a client options object to a client.
+
+.. code-block:: python
+
+ from google.api_core.client_options import ClientOptions
+ from google.cloud.vision_v1 import ImageAnnotatorClient
+
+ def get_client_cert():
+ # code to load client certificate and private key.
+ return client_cert_bytes, client_private_key_bytes
+
+ options = ClientOptions(api_endpoint="foo.googleapis.com",
+ client_cert_source=get_client_cert)
+
+ client = ImageAnnotatorClient(client_options=options)
+
+You can also pass a mapping object.
+
+.. code-block:: python
+
+ from google.cloud.vision_v1 import ImageAnnotatorClient
+
+ client = ImageAnnotatorClient(
+ client_options={
+ "api_endpoint": "foo.googleapis.com",
+ "client_cert_source" : get_client_cert
+ })
+
+
+"""
+
+
+class ClientOptions(object):
+ """Client Options used to set options on clients.
+
+ Args:
+ api_endpoint (Optional[str]): The desired API endpoint, e.g.,
+ compute.googleapis.com
+ client_cert_source (Optional[Callable[[], (bytes, bytes)]]): A callback
+ which returns client certificate bytes and private key bytes both in
+ PEM format. ``client_cert_source`` and ``client_encrypted_cert_source``
+ are mutually exclusive.
+ client_encrypted_cert_source (Optional[Callable[[], (str, str, bytes)]]):
+ A callback which returns client certificate file path, encrypted
+            private key file path, and the passphrase bytes. ``client_cert_source``
+ and ``client_encrypted_cert_source`` are mutually exclusive.
+ quota_project_id (Optional[str]): A project name that a client's
+ quota belongs to.
+ credentials_file (Optional[str]): A path to a file storing credentials.
+        credentials_file (Optional[str]): A path to a file storing credentials. ``credentials_file`` and ``api_key`` are mutually exclusive.
+ scopes (Optional[Sequence[str]]): OAuth access token override scopes.
+ api_key (Optional[str]): Google API key. ``credentials_file`` and
+ ``api_key`` are mutually exclusive.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the service endpoint value will be used as a default.
+ An example of a valid ``api_audience`` is: "https://language.googleapis.com".
+ universe_domain (Optional[str]): The desired universe domain. This must match
+ the one in credentials. If not set, the default universe domain is
+ `googleapis.com`. If both `api_endpoint` and `universe_domain` are set,
+ then `api_endpoint` is used as the service endpoint. If `api_endpoint` is
+ not specified, the format will be `{service}.{universe_domain}`.
+
+ Raises:
+ ValueError: If both ``client_cert_source`` and ``client_encrypted_cert_source``
+ are provided, or both ``credentials_file`` and ``api_key`` are provided.
+ """
+
+ def __init__(
+ self,
+ api_endpoint=None,
+ client_cert_source=None,
+ client_encrypted_cert_source=None,
+ quota_project_id=None,
+ credentials_file=None,
+ scopes=None,
+ api_key=None,
+ api_audience=None,
+ universe_domain=None,
+ ):
+ if client_cert_source and client_encrypted_cert_source:
+ raise ValueError(
+ "client_cert_source and client_encrypted_cert_source are mutually exclusive"
+ )
+ if api_key and credentials_file:
+ raise ValueError("api_key and credentials_file are mutually exclusive")
+ self.api_endpoint = api_endpoint
+ self.client_cert_source = client_cert_source
+ self.client_encrypted_cert_source = client_encrypted_cert_source
+ self.quota_project_id = quota_project_id
+ self.credentials_file = credentials_file
+ self.scopes = scopes
+ self.api_key = api_key
+ self.api_audience = api_audience
+ self.universe_domain = universe_domain
+
+ def __repr__(self):
+ return "ClientOptions: " + repr(self.__dict__)
+
+
+def from_dict(options):
+ """Construct a client options object from a mapping object.
+
+ Args:
+ options (collections.abc.Mapping): A mapping object with client options.
+ See the docstring for ClientOptions for details on valid arguments.
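+
+    For example, a minimal sketch (the endpoint and project are illustrative):
+
+    .. code-block:: python
+
+        from google.api_core import client_options
+
+        options = client_options.from_dict(
+            {"api_endpoint": "foo.googleapis.com", "quota_project_id": "my-project"}
+        )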
+ """
+
+ client_options = ClientOptions()
+
+ for key, value in options.items():
+ if hasattr(client_options, key):
+ setattr(client_options, key, value)
+ else:
+ raise ValueError("ClientOptions does not accept an option '" + key + "'")
+
+ return client_options
diff --git a/Lib/site-packages/google/api_core/datetime_helpers.py b/Lib/site-packages/google/api_core/datetime_helpers.py
new file mode 100644
index 0000000..c379230
--- /dev/null
+++ b/Lib/site-packages/google/api_core/datetime_helpers.py
@@ -0,0 +1,298 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for :mod:`datetime`."""
+
+import calendar
+import datetime
+import re
+
+from google.protobuf import timestamp_pb2
+
+
+_UTC_EPOCH = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
+_RFC3339_MICROS = "%Y-%m-%dT%H:%M:%S.%fZ"
+_RFC3339_NO_FRACTION = "%Y-%m-%dT%H:%M:%S"
+# datetime.strptime cannot handle nanosecond precision: parse w/ regex
+_RFC3339_NANOS = re.compile(
+ r"""
+    (?P<no_fraction>
+        \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}  # YYYY-MM-DDTHH:MM:SS
+    )
+    (                                        # Optional decimal part
+     \.                                      # decimal point
+        (?P<nanos>\d{1,9})                   # nanoseconds, maybe truncated
+ )?
+ Z # Zulu
+""",
+ re.VERBOSE,
+)
+
+
+def utcnow():
+ """A :meth:`datetime.datetime.utcnow()` alias to allow mocking in tests."""
+ return datetime.datetime.now(tz=datetime.timezone.utc).replace(tzinfo=None)
+
+
+def to_milliseconds(value):
+ """Convert a zone-aware datetime to milliseconds since the unix epoch.
+
+ Args:
+        value (datetime.datetime): The datetime to convert.
+
+ Returns:
+ int: Milliseconds since the unix epoch.
+ """
+ micros = to_microseconds(value)
+ return micros // 1000
+
+
+def from_microseconds(value):
+ """Convert timestamp in microseconds since the unix epoch to datetime.
+
+ Args:
+ value (float): The timestamp to convert, in microseconds.
+
+ Returns:
+ datetime.datetime: The datetime object equivalent to the timestamp in
+ UTC.
+ """
+ return _UTC_EPOCH + datetime.timedelta(microseconds=value)
+
+
+def to_microseconds(value):
+ """Convert a datetime to microseconds since the unix epoch.
+
+ Args:
+        value (datetime.datetime): The datetime to convert.
+
+ Returns:
+ int: Microseconds since the unix epoch.
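+
+    For example (a minimal sketch):
+
+    .. code-block:: python
+
+        to_microseconds(datetime.datetime(1970, 1, 1, 0, 0, 1))  # -> 1000000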
+ """
+ if not value.tzinfo:
+ value = value.replace(tzinfo=datetime.timezone.utc)
+ # Regardless of what timezone is on the value, convert it to UTC.
+ value = value.astimezone(datetime.timezone.utc)
+ # Convert the datetime to a microsecond timestamp.
+ return int(calendar.timegm(value.timetuple()) * 1e6) + value.microsecond
+
+
+def from_iso8601_date(value):
+    """Convert an ISO8601 date string to a date.
+
+ Args:
+ value (str): The ISO8601 date string.
+
+ Returns:
+ datetime.date: A date equivalent to the date string.
+ """
+ return datetime.datetime.strptime(value, "%Y-%m-%d").date()
+
+
+def from_iso8601_time(value):
+ """Convert a zoneless ISO8601 time string to a time.
+
+ Args:
+ value (str): The ISO8601 time string.
+
+ Returns:
+ datetime.time: A time equivalent to the time string.
+ """
+ return datetime.datetime.strptime(value, "%H:%M:%S").time()
+
+
+def from_rfc3339(value):
+ """Convert an RFC3339-format timestamp to a native datetime.
+
+ Supported formats include those without fractional seconds, or with
+ any fraction up to nanosecond precision.
+
+ .. note::
+ Python datetimes do not support nanosecond precision; this function
+ therefore truncates such values to microseconds.
+
+ Args:
+ value (str): The RFC3339 string to convert.
+
+ Returns:
+ datetime.datetime: The datetime object equivalent to the timestamp
+ in UTC.
+
+ Raises:
+ ValueError: If the timestamp does not match the RFC3339
+ regular expression.
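+
+    For example (a minimal sketch; note the nanoseconds are truncated):
+
+    .. code-block:: python
+
+        from_rfc3339("2009-12-17T12:44:32.123456789Z")
+        # -> datetime.datetime(2009, 12, 17, 12, 44, 32, 123456,
+        #                      tzinfo=datetime.timezone.utc)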
+ """
+ with_nanos = _RFC3339_NANOS.match(value)
+
+ if with_nanos is None:
+ raise ValueError(
+ "Timestamp: {!r}, does not match pattern: {!r}".format(
+ value, _RFC3339_NANOS.pattern
+ )
+ )
+
+ bare_seconds = datetime.datetime.strptime(
+ with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION
+ )
+ fraction = with_nanos.group("nanos")
+
+ if fraction is None:
+ micros = 0
+ else:
+ scale = 9 - len(fraction)
+ nanos = int(fraction) * (10**scale)
+ micros = nanos // 1000
+
+ return bare_seconds.replace(microsecond=micros, tzinfo=datetime.timezone.utc)
+
+
+from_rfc3339_nanos = from_rfc3339  # Deprecated alias kept for backward compatibility.
+
+
+def to_rfc3339(value, ignore_zone=True):
+ """Convert a datetime to an RFC3339 timestamp string.
+
+ Args:
+ value (datetime.datetime):
+ The datetime object to be converted to a string.
+ ignore_zone (bool): If True, then the timezone (if any) of the
+ datetime object is ignored and the datetime is treated as UTC.
+
+ Returns:
+ str: The RFC3339 formatted string representing the datetime.
+ """
+ if not ignore_zone and value.tzinfo is not None:
+ # Convert to UTC and remove the time zone info.
+ value = value.replace(tzinfo=None) - value.utcoffset()
+
+ return value.strftime(_RFC3339_MICROS)
+
+
+class DatetimeWithNanoseconds(datetime.datetime):
+ """Track nanosecond in addition to normal datetime attrs.
+
+ Nanosecond can be passed only as a keyword argument.
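+
+    For example (a minimal sketch):
+
+    .. code-block:: python
+
+        stamp = DatetimeWithNanoseconds(
+            2016, 12, 20, 21, 13, 47, nanosecond=123456789
+        )
+        stamp.rfc3339()  # -> '2016-12-20T21:13:47.123456789Z'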
+ """
+
+ __slots__ = ("_nanosecond",)
+
+ # pylint: disable=arguments-differ
+ def __new__(cls, *args, **kw):
+ nanos = kw.pop("nanosecond", 0)
+ if nanos > 0:
+ if "microsecond" in kw:
+ raise TypeError("Specify only one of 'microsecond' or 'nanosecond'")
+ kw["microsecond"] = nanos // 1000
+ inst = datetime.datetime.__new__(cls, *args, **kw)
+ inst._nanosecond = nanos or 0
+ return inst
+
+ # pylint: disable=arguments-differ
+
+ @property
+ def nanosecond(self):
+ """Read-only: nanosecond precision."""
+ return self._nanosecond
+
+ def rfc3339(self):
+ """Return an RFC3339-compliant timestamp.
+
+ Returns:
+ (str): Timestamp string according to RFC3339 spec.
+ """
+ if self._nanosecond == 0:
+ return to_rfc3339(self)
+ nanos = str(self._nanosecond).rjust(9, "0").rstrip("0")
+ return "{}.{}Z".format(self.strftime(_RFC3339_NO_FRACTION), nanos)
+
+ @classmethod
+ def from_rfc3339(cls, stamp):
+ """Parse RFC3339-compliant timestamp, preserving nanoseconds.
+
+ Args:
+ stamp (str): RFC3339 stamp, with up to nanosecond precision
+
+ Returns:
+ :class:`DatetimeWithNanoseconds`:
+ an instance matching the timestamp string
+
+ Raises:
+ ValueError: if `stamp` does not match the expected format
+ """
+ with_nanos = _RFC3339_NANOS.match(stamp)
+ if with_nanos is None:
+ raise ValueError(
+ "Timestamp: {}, does not match pattern: {}".format(
+ stamp, _RFC3339_NANOS.pattern
+ )
+ )
+ bare = datetime.datetime.strptime(
+ with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION
+ )
+ fraction = with_nanos.group("nanos")
+ if fraction is None:
+ nanos = 0
+ else:
+ scale = 9 - len(fraction)
+ nanos = int(fraction) * (10**scale)
+ return cls(
+ bare.year,
+ bare.month,
+ bare.day,
+ bare.hour,
+ bare.minute,
+ bare.second,
+ nanosecond=nanos,
+ tzinfo=datetime.timezone.utc,
+ )
+
+ def timestamp_pb(self):
+ """Return a timestamp message.
+
+ Returns:
+ (:class:`~google.protobuf.timestamp_pb2.Timestamp`): Timestamp message
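+
+        For example (a minimal sketch):
+
+        .. code-block:: python
+
+            stamp = DatetimeWithNanoseconds(1970, 1, 1, 0, 0, 1, nanosecond=500)
+            stamp.timestamp_pb()  # -> Timestamp(seconds=1, nanos=500)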
+ """
+ inst = (
+ self
+ if self.tzinfo is not None
+ else self.replace(tzinfo=datetime.timezone.utc)
+ )
+ delta = inst - _UTC_EPOCH
+ seconds = int(delta.total_seconds())
+ nanos = self._nanosecond or self.microsecond * 1000
+ return timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
+
+ @classmethod
+ def from_timestamp_pb(cls, stamp):
+ """Parse RFC3339-compliant timestamp, preserving nanoseconds.
+
+ Args:
+ stamp (:class:`~google.protobuf.timestamp_pb2.Timestamp`): timestamp message
+
+ Returns:
+ :class:`DatetimeWithNanoseconds`:
+ an instance matching the timestamp message
+ """
+ microseconds = int(stamp.seconds * 1e6)
+ bare = from_microseconds(microseconds)
+ return cls(
+ bare.year,
+ bare.month,
+ bare.day,
+ bare.hour,
+ bare.minute,
+ bare.second,
+ nanosecond=stamp.nanos,
+ tzinfo=datetime.timezone.utc,
+ )
diff --git a/Lib/site-packages/google/api_core/exceptions.py b/Lib/site-packages/google/api_core/exceptions.py
new file mode 100644
index 0000000..d4cb997
--- /dev/null
+++ b/Lib/site-packages/google/api_core/exceptions.py
@@ -0,0 +1,626 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Exceptions raised by Google API core & clients.
+
+This module provides base classes for all errors raised by libraries based
+on :mod:`google.api_core`, including both HTTP and gRPC clients.
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import http.client
+from typing import Dict
+from typing import Union
+import warnings
+
+from google.rpc import error_details_pb2
+
+try:
+ import grpc
+
+ try:
+ from grpc_status import rpc_status
+ except ImportError: # pragma: NO COVER
+ warnings.warn(
+ "Please install grpcio-status to obtain helpful grpc error messages.",
+ ImportWarning,
+ )
+ rpc_status = None
+except ImportError: # pragma: NO COVER
+ grpc = None
+
+# Lookup tables for mapping exceptions from HTTP and gRPC transports.
+# Populated by _GoogleAPICallErrorMeta
+_HTTP_CODE_TO_EXCEPTION: Dict[int, Exception] = {}
+_GRPC_CODE_TO_EXCEPTION: Dict[int, Exception] = {}
+
+# Additional lookup table to map integer status codes to grpc status code
+# grpc does not currently support initializing enums from ints
+# i.e., grpc.StatusCode(5) raises an error
+_INT_TO_GRPC_CODE = {}
+if grpc is not None: # pragma: no branch
+ for x in grpc.StatusCode:
+ _INT_TO_GRPC_CODE[x.value[0]] = x
+
+
+class GoogleAPIError(Exception):
+ """Base class for all exceptions raised by Google API Clients."""
+
+ pass
+
+
+class DuplicateCredentialArgs(GoogleAPIError):
+ """Raised when multiple credentials are passed."""
+
+ pass
+
+
+class RetryError(GoogleAPIError):
+ """Raised when a function has exhausted all of its available retries.
+
+ Args:
+ message (str): The exception message.
+ cause (Exception): The last exception raised when retrying the
+ function.
+ """
+
+ def __init__(self, message, cause):
+ super(RetryError, self).__init__(message)
+ self.message = message
+ self._cause = cause
+
+ @property
+ def cause(self):
+ """The last exception raised when retrying the function."""
+ return self._cause
+
+ def __str__(self):
+ return "{}, last exception: {}".format(self.message, self.cause)
+
+
+class _GoogleAPICallErrorMeta(type):
+ """Metaclass for registering GoogleAPICallError subclasses."""
+
+ def __new__(mcs, name, bases, class_dict):
+ cls = type.__new__(mcs, name, bases, class_dict)
+ if cls.code is not None:
+ _HTTP_CODE_TO_EXCEPTION.setdefault(cls.code, cls)
+ if cls.grpc_status_code is not None:
+ _GRPC_CODE_TO_EXCEPTION.setdefault(cls.grpc_status_code, cls)
+ return cls
+
+
+class GoogleAPICallError(GoogleAPIError, metaclass=_GoogleAPICallErrorMeta):
+ """Base class for exceptions raised by calling API methods.
+
+ Args:
+ message (str): The exception message.
+ errors (Sequence[Any]): An optional list of error details.
+ details (Sequence[Any]): An optional list of objects defined in google.rpc.error_details.
+ response (Union[requests.Request, grpc.Call]): The response or
+ gRPC call metadata.
+ error_info (Union[error_details_pb2.ErrorInfo, None]): An optional object containing error info
+ (google.rpc.error_details.ErrorInfo).
+ """
+
+ code: Union[int, None] = None
+ """Optional[int]: The HTTP status code associated with this error.
+
+ This may be ``None`` if the exception does not have a direct mapping
+ to an HTTP error.
+
+ See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
+ """
+
+ grpc_status_code = None
+ """Optional[grpc.StatusCode]: The gRPC status code associated with this
+ error.
+
+ This may be ``None`` if the exception does not match up to a gRPC error.
+ """
+
+ def __init__(self, message, errors=(), details=(), response=None, error_info=None):
+ super(GoogleAPICallError, self).__init__(message)
+ self.message = message
+ """str: The exception message."""
+ self._errors = errors
+ self._details = details
+ self._response = response
+ self._error_info = error_info
+
+ def __str__(self):
+ error_msg = "{} {}".format(self.code, self.message)
+ if self.details:
+ error_msg = "{} {}".format(error_msg, self.details)
+ # Note: This else condition can be removed once proposal A from
+ # b/284179390 is implemented.
+ else:
+ if self.errors:
+ errors = [
+ f"{error.code}: {error.message}"
+ for error in self.errors
+ if hasattr(error, "code") and hasattr(error, "message")
+ ]
+ if errors:
+ error_msg = "{} {}".format(error_msg, "\n".join(errors))
+ return error_msg
+
+ @property
+ def reason(self):
+ """The reason of the error.
+        """The reason for the error.
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
+
+ Returns:
+ Union[str, None]: An optional string containing reason of the error.
+ """
+ return self._error_info.reason if self._error_info else None
+
+ @property
+ def domain(self):
+ """The logical grouping to which the "reason" belongs.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
+
+ Returns:
+ Union[str, None]: An optional string containing a logical grouping to which the "reason" belongs.
+ """
+ return self._error_info.domain if self._error_info else None
+
+ @property
+ def metadata(self):
+ """Additional structured details about this error.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
+
+ Returns:
+ Union[Dict[str, str], None]: An optional object containing structured details about the error.
+ """
+ return self._error_info.metadata if self._error_info else None
+
+ @property
+ def errors(self):
+ """Detailed error information.
+
+ Returns:
+ Sequence[Any]: A list of additional error details.
+ """
+ return list(self._errors)
+
+ @property
+ def details(self):
+ """Information contained in google.rpc.status.details.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto
+
+ Returns:
+ Sequence[Any]: A list of structured objects from error_details.proto
+ """
+ return list(self._details)
+
+ @property
+ def response(self):
+ """Optional[Union[requests.Request, grpc.Call]]: The response or
+ gRPC call metadata."""
+ return self._response
+
+
+class Redirection(GoogleAPICallError):
+    """Base class for all redirection (HTTP 3xx) responses."""
+
+
+class MovedPermanently(Redirection):
+ """Exception mapping a ``301 Moved Permanently`` response."""
+
+ code = http.client.MOVED_PERMANENTLY
+
+
+class NotModified(Redirection):
+ """Exception mapping a ``304 Not Modified`` response."""
+
+ code = http.client.NOT_MODIFIED
+
+
+class TemporaryRedirect(Redirection):
+ """Exception mapping a ``307 Temporary Redirect`` response."""
+
+ code = http.client.TEMPORARY_REDIRECT
+
+
+class ResumeIncomplete(Redirection):
+ """Exception mapping a ``308 Resume Incomplete`` response.
+
+ .. note:: :attr:`http.client.PERMANENT_REDIRECT` is ``308``, but Google
+ APIs differ in their use of this status code.
+ """
+
+ code = 308
+
+
+class ClientError(GoogleAPICallError):
+ """Base class for all client error (HTTP 4xx) responses."""
+
+
+class BadRequest(ClientError):
+ """Exception mapping a ``400 Bad Request`` response."""
+
+ code = http.client.BAD_REQUEST
+
+
+class InvalidArgument(BadRequest):
+ """Exception mapping a :attr:`grpc.StatusCode.INVALID_ARGUMENT` error."""
+
+ grpc_status_code = grpc.StatusCode.INVALID_ARGUMENT if grpc is not None else None
+
+
+class FailedPrecondition(BadRequest):
+ """Exception mapping a :attr:`grpc.StatusCode.FAILED_PRECONDITION`
+ error."""
+
+ grpc_status_code = grpc.StatusCode.FAILED_PRECONDITION if grpc is not None else None
+
+
+class OutOfRange(BadRequest):
+ """Exception mapping a :attr:`grpc.StatusCode.OUT_OF_RANGE` error."""
+
+ grpc_status_code = grpc.StatusCode.OUT_OF_RANGE if grpc is not None else None
+
+
+class Unauthorized(ClientError):
+ """Exception mapping a ``401 Unauthorized`` response."""
+
+ code = http.client.UNAUTHORIZED
+
+
+class Unauthenticated(Unauthorized):
+ """Exception mapping a :attr:`grpc.StatusCode.UNAUTHENTICATED` error."""
+
+ grpc_status_code = grpc.StatusCode.UNAUTHENTICATED if grpc is not None else None
+
+
+class Forbidden(ClientError):
+ """Exception mapping a ``403 Forbidden`` response."""
+
+ code = http.client.FORBIDDEN
+
+
+class PermissionDenied(Forbidden):
+ """Exception mapping a :attr:`grpc.StatusCode.PERMISSION_DENIED` error."""
+
+ grpc_status_code = grpc.StatusCode.PERMISSION_DENIED if grpc is not None else None
+
+
+class NotFound(ClientError):
+ """Exception mapping a ``404 Not Found`` response or a
+ :attr:`grpc.StatusCode.NOT_FOUND` error."""
+
+ code = http.client.NOT_FOUND
+ grpc_status_code = grpc.StatusCode.NOT_FOUND if grpc is not None else None
+
+
+class MethodNotAllowed(ClientError):
+ """Exception mapping a ``405 Method Not Allowed`` response."""
+
+ code = http.client.METHOD_NOT_ALLOWED
+
+
+class Conflict(ClientError):
+ """Exception mapping a ``409 Conflict`` response."""
+
+ code = http.client.CONFLICT
+
+
+class AlreadyExists(Conflict):
+ """Exception mapping a :attr:`grpc.StatusCode.ALREADY_EXISTS` error."""
+
+ grpc_status_code = grpc.StatusCode.ALREADY_EXISTS if grpc is not None else None
+
+
+class Aborted(Conflict):
+ """Exception mapping a :attr:`grpc.StatusCode.ABORTED` error."""
+
+ grpc_status_code = grpc.StatusCode.ABORTED if grpc is not None else None
+
+
+class LengthRequired(ClientError):
+ """Exception mapping a ``411 Length Required`` response."""
+
+ code = http.client.LENGTH_REQUIRED
+
+
+class PreconditionFailed(ClientError):
+ """Exception mapping a ``412 Precondition Failed`` response."""
+
+ code = http.client.PRECONDITION_FAILED
+
+
+class RequestRangeNotSatisfiable(ClientError):
+ """Exception mapping a ``416 Request Range Not Satisfiable`` response."""
+
+ code = http.client.REQUESTED_RANGE_NOT_SATISFIABLE
+
+
+class TooManyRequests(ClientError):
+ """Exception mapping a ``429 Too Many Requests`` response."""
+
+ code = http.client.TOO_MANY_REQUESTS
+
+
+class ResourceExhausted(TooManyRequests):
+ """Exception mapping a :attr:`grpc.StatusCode.RESOURCE_EXHAUSTED` error."""
+
+ grpc_status_code = grpc.StatusCode.RESOURCE_EXHAUSTED if grpc is not None else None
+
+
+class Cancelled(ClientError):
+ """Exception mapping a :attr:`grpc.StatusCode.CANCELLED` error."""
+
+ # This maps to HTTP status code 499. See
+ # https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto
+ code = 499
+ grpc_status_code = grpc.StatusCode.CANCELLED if grpc is not None else None
+
+
+class ServerError(GoogleAPICallError):
+ """Base for 5xx responses."""
+
+
+class InternalServerError(ServerError):
+    """Exception mapping a ``500 Internal Server Error`` response or a
+ :attr:`grpc.StatusCode.INTERNAL` error."""
+
+ code = http.client.INTERNAL_SERVER_ERROR
+ grpc_status_code = grpc.StatusCode.INTERNAL if grpc is not None else None
+
+
+class Unknown(ServerError):
+ """Exception mapping a :attr:`grpc.StatusCode.UNKNOWN` error."""
+
+ grpc_status_code = grpc.StatusCode.UNKNOWN if grpc is not None else None
+
+
+class DataLoss(ServerError):
+ """Exception mapping a :attr:`grpc.StatusCode.DATA_LOSS` error."""
+
+ grpc_status_code = grpc.StatusCode.DATA_LOSS if grpc is not None else None
+
+
+class MethodNotImplemented(ServerError):
+ """Exception mapping a ``501 Not Implemented`` response or a
+ :attr:`grpc.StatusCode.UNIMPLEMENTED` error."""
+
+ code = http.client.NOT_IMPLEMENTED
+ grpc_status_code = grpc.StatusCode.UNIMPLEMENTED if grpc is not None else None
+
+
+class BadGateway(ServerError):
+ """Exception mapping a ``502 Bad Gateway`` response."""
+
+ code = http.client.BAD_GATEWAY
+
+
+class ServiceUnavailable(ServerError):
+ """Exception mapping a ``503 Service Unavailable`` response or a
+ :attr:`grpc.StatusCode.UNAVAILABLE` error."""
+
+ code = http.client.SERVICE_UNAVAILABLE
+ grpc_status_code = grpc.StatusCode.UNAVAILABLE if grpc is not None else None
+
+
+class GatewayTimeout(ServerError):
+ """Exception mapping a ``504 Gateway Timeout`` response."""
+
+ code = http.client.GATEWAY_TIMEOUT
+
+
+class DeadlineExceeded(GatewayTimeout):
+ """Exception mapping a :attr:`grpc.StatusCode.DEADLINE_EXCEEDED` error."""
+
+ grpc_status_code = grpc.StatusCode.DEADLINE_EXCEEDED if grpc is not None else None
+
+
+def exception_class_for_http_status(status_code):
+ """Return the exception class for a specific HTTP status code.
+
+ Args:
+ status_code (int): The HTTP status code.
+
+ Returns:
+ :func:`type`: the appropriate subclass of :class:`GoogleAPICallError`.
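+
+    For example (illustrative):
+
+    .. code-block:: python
+
+        exception_class_for_http_status(404)  # -> NotFound
+        exception_class_for_http_status(429)  # -> TooManyRequests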
+ """
+ return _HTTP_CODE_TO_EXCEPTION.get(status_code, GoogleAPICallError)
+
+
+def from_http_status(status_code, message, **kwargs):
+ """Create a :class:`GoogleAPICallError` from an HTTP status code.
+
+ Args:
+ status_code (int): The HTTP status code.
+ message (str): The exception message.
+ kwargs: Additional arguments passed to the :class:`GoogleAPICallError`
+ constructor.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`.
+ """
+ error_class = exception_class_for_http_status(status_code)
+ error = error_class(message, **kwargs)
+
+ if error.code is None:
+ error.code = status_code
+
+ return error
+
+
+def from_http_response(response):
+ """Create a :class:`GoogleAPICallError` from a :class:`requests.Response`.
+
+ Args:
+ response (requests.Response): The HTTP response.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`, with the message and errors populated
+ from the response.
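+
+    A minimal usage sketch (``session`` and ``url`` are assumed to come from
+    the calling code, with ``session`` being a ``requests.Session``):
+
+    .. code-block:: python
+
+        response = session.get(url)
+        if response.status_code >= 400:
+            raise from_http_response(response)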
+ """
+ try:
+ payload = response.json()
+ except ValueError:
+ payload = {"error": {"message": response.text or "unknown error"}}
+
+ error_message = payload.get("error", {}).get("message", "unknown error")
+ errors = payload.get("error", {}).get("errors", ())
+ # In JSON, details are already formatted in developer-friendly way.
+ details = payload.get("error", {}).get("details", ())
+ error_info = list(
+ filter(
+ lambda detail: detail.get("@type", "")
+ == "type.googleapis.com/google.rpc.ErrorInfo",
+ details,
+ )
+ )
+ error_info = error_info[0] if error_info else None
+
+ message = "{method} {url}: {error}".format(
+ method=response.request.method,
+ url=response.request.url,
+ error=error_message,
+ )
+
+ exception = from_http_status(
+ response.status_code,
+ message,
+ errors=errors,
+ details=details,
+ response=response,
+ error_info=error_info,
+ )
+ return exception
+
+
+def exception_class_for_grpc_status(status_code):
+ """Return the exception class for a specific :class:`grpc.StatusCode`.
+
+ Args:
+ status_code (grpc.StatusCode): The gRPC status code.
+
+ Returns:
+ :func:`type`: the appropriate subclass of :class:`GoogleAPICallError`.
+ """
+ return _GRPC_CODE_TO_EXCEPTION.get(status_code, GoogleAPICallError)
+
+
+def from_grpc_status(status_code, message, **kwargs):
+ """Create a :class:`GoogleAPICallError` from a :class:`grpc.StatusCode`.
+
+ Args:
+ status_code (Union[grpc.StatusCode, int]): The gRPC status code.
+ message (str): The exception message.
+ kwargs: Additional arguments passed to the :class:`GoogleAPICallError`
+ constructor.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`.
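+
+    For example (illustrative, assuming ``grpc`` is installed):
+
+    .. code-block:: python
+
+        err = from_grpc_status(grpc.StatusCode.NOT_FOUND, "resource missing")
+        isinstance(err, NotFound)  # -> True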
+ """
+
+ if isinstance(status_code, int):
+ status_code = _INT_TO_GRPC_CODE.get(status_code, status_code)
+
+ error_class = exception_class_for_grpc_status(status_code)
+ error = error_class(message, **kwargs)
+
+ if error.grpc_status_code is None:
+ error.grpc_status_code = status_code
+
+ return error
+
+
+def _is_informative_grpc_error(rpc_exc):
+ return hasattr(rpc_exc, "code") and hasattr(rpc_exc, "details")
+
+
+def _parse_grpc_error_details(rpc_exc):
+ try:
+ status = rpc_status.from_call(rpc_exc)
+ except NotImplementedError: # workaround
+ return [], None
+
+ if not status:
+ return [], None
+
+ possible_errors = [
+ error_details_pb2.BadRequest,
+ error_details_pb2.PreconditionFailure,
+ error_details_pb2.QuotaFailure,
+ error_details_pb2.ErrorInfo,
+ error_details_pb2.RetryInfo,
+ error_details_pb2.ResourceInfo,
+ error_details_pb2.RequestInfo,
+ error_details_pb2.DebugInfo,
+ error_details_pb2.Help,
+ error_details_pb2.LocalizedMessage,
+ ]
+ error_info = None
+ error_details = []
+ for detail in status.details:
+ matched_detail_cls = list(
+ filter(lambda x: detail.Is(x.DESCRIPTOR), possible_errors)
+ )
+ # If nothing matched, use detail directly.
+ if len(matched_detail_cls) == 0:
+ info = detail
+ else:
+ info = matched_detail_cls[0]()
+ detail.Unpack(info)
+ error_details.append(info)
+ if isinstance(info, error_details_pb2.ErrorInfo):
+ error_info = info
+ return error_details, error_info
+
+
+def from_grpc_error(rpc_exc):
+ """Create a :class:`GoogleAPICallError` from a :class:`grpc.RpcError`.
+
+ Args:
+ rpc_exc (grpc.RpcError): The gRPC error.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`.
+ """
+    # NOTE(lidiz) All gRPC errors share the parent class grpc.RpcError.
+    # However, checking for grpc.RpcError would break backward compatibility.
+ if (
+ grpc is not None and isinstance(rpc_exc, grpc.Call)
+ ) or _is_informative_grpc_error(rpc_exc):
+ details, err_info = _parse_grpc_error_details(rpc_exc)
+ return from_grpc_status(
+ rpc_exc.code(),
+ rpc_exc.details(),
+ errors=(rpc_exc,),
+ details=details,
+ response=rpc_exc,
+ error_info=err_info,
+ )
+ else:
+ return GoogleAPICallError(str(rpc_exc), errors=(rpc_exc,), response=rpc_exc)
diff --git a/Lib/site-packages/google/api_core/extended_operation.py b/Lib/site-packages/google/api_core/extended_operation.py
new file mode 100644
index 0000000..d474632
--- /dev/null
+++ b/Lib/site-packages/google/api_core/extended_operation.py
@@ -0,0 +1,225 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Futures for extended long-running operations returned from Google Cloud APIs.
+
+These futures can be used to synchronously wait for the result of a
+long-running operation using :meth:`ExtendedOperation.result`:
+
+.. code-block:: python
+
+ extended_operation = my_api_client.long_running_method()
+
+ extended_operation.result()
+
+Or asynchronously using callbacks and :meth:`ExtendedOperation.add_done_callback`:
+
+.. code-block:: python
+
+ extended_operation = my_api_client.long_running_method()
+
+ def my_callback(ex_op):
+ print(f"Operation {ex_op.name} completed")
+
+ extended_operation.add_done_callback(my_callback)
+
+"""
+
+import threading
+
+from google.api_core import exceptions
+from google.api_core.future import polling
+
+
+class ExtendedOperation(polling.PollingFuture):
+ """An ExtendedOperation future for interacting with a Google API Long-Running Operation.
+
+ Args:
+ extended_operation (proto.Message): The initial operation.
+ refresh (Callable[[], type(extended_operation)]): A callable that returns
+ the latest state of the operation.
+ cancel (Callable[[], None]): A callable that tries to cancel the operation.
+        polling (Optional[google.api_core.retry.Retry]): The configuration used
+            for polling. This can be used to control how often :meth:`done`
+            is polled. If the ``timeout`` argument to :meth:`result` is
+            specified, it will override the ``polling.timeout`` property.
+        retry (Optional[google.api_core.retry.Retry]): DEPRECATED, use
+            ``polling`` instead. If specified, it will override the ``polling``
+            parameter to maintain backward compatibility.
+
+ Note: Most long-running API methods use google.api_core.operation.Operation
+ This class is a wrapper for a subset of methods that use alternative
+ Long-Running Operation (LRO) semantics.
+
+    Note: there is no concrete type that the extended operation must be.
+ It MUST have fields that correspond to the following, POSSIBLY WITH DIFFERENT NAMES:
+ * name: str
+ * status: Union[str, bool, enum.Enum]
+ * error_code: int
+ * error_message: str
+ """
+
+ def __init__(
+ self,
+ extended_operation,
+ refresh,
+ cancel,
+ polling=polling.DEFAULT_POLLING,
+ **kwargs,
+ ):
+ super().__init__(polling=polling, **kwargs)
+ self._extended_operation = extended_operation
+ self._refresh = refresh
+ self._cancel = cancel
+ # Note: the extended operation does not give a good way to indicate cancellation.
+ # We make do with manually tracking cancellation and checking for doneness.
+ self._cancelled = False
+ self._completion_lock = threading.Lock()
+ # Invoke in case the operation came back already complete.
+ self._handle_refreshed_operation()
+
+ # Note: the following four properties MUST be overridden in a subclass
+ # if, and only if, the fields in the corresponding extended operation message
+ # have different names.
+ #
+ # E.g. we have an extended operation class that looks like
+ #
+ # class MyOperation(proto.Message):
+ # moniker = proto.Field(proto.STRING, number=1)
+ # status_msg = proto.Field(proto.STRING, number=2)
+ # optional http_error_code = proto.Field(proto.INT32, number=3)
+ # optional http_error_msg = proto.Field(proto.STRING, number=4)
+ #
+ # the ExtendedOperation subclass would provide property overrides that map
+ # to these (poorly named) fields.
+ @property
+ def name(self):
+ return self._extended_operation.name
+
+ @property
+ def status(self):
+ return self._extended_operation.status
+
+ @property
+ def error_code(self):
+ return self._extended_operation.error_code
+
+ @property
+ def error_message(self):
+ return self._extended_operation.error_message
+
+ def __getattr__(self, name):
+ return getattr(self._extended_operation, name)
+
+ def done(self, retry=None):
+ self._refresh_and_update(retry)
+ return self._extended_operation.done
+
+ def cancel(self):
+ if self.done():
+ return False
+
+ self._cancel()
+ self._cancelled = True
+ return True
+
+ def cancelled(self):
+ # TODO(dovs): there is not currently a good way to determine whether the
+ # operation has been cancelled.
+ # The best we can do is manually keep track of cancellation
+ # and check for doneness.
+ if not self._cancelled:
+ return False
+
+ self._refresh_and_update()
+ return self._extended_operation.done
+
+ def _refresh_and_update(self, retry=None):
+ if not self._extended_operation.done:
+ self._extended_operation = (
+ self._refresh(retry=retry) if retry else self._refresh()
+ )
+ self._handle_refreshed_operation()
+
+ def _handle_refreshed_operation(self):
+ with self._completion_lock:
+ if not self._extended_operation.done:
+ return
+
+ if self.error_code and self.error_message:
+ # Note: `errors` can be removed once proposal A from
+ # b/284179390 is implemented.
+ errors = []
+ if hasattr(self, "error") and hasattr(self.error, "errors"):
+ errors = self.error.errors
+ exception = exceptions.from_http_status(
+ status_code=self.error_code,
+ message=self.error_message,
+ response=self._extended_operation,
+ errors=errors,
+ )
+ self.set_exception(exception)
+ elif self.error_code or self.error_message:
+ exception = exceptions.GoogleAPICallError(
+ f"Unexpected error {self.error_code}: {self.error_message}"
+ )
+ self.set_exception(exception)
+ else:
+ # Extended operations have no payload.
+ self.set_result(None)
+
+ @classmethod
+ def make(cls, refresh, cancel, extended_operation, **kwargs):
+ """
+ Return an instantiated ExtendedOperation (or child) that wraps
+ * a refresh callable
+ * a cancel callable (can be a no-op)
+ * an initial result
+
+ .. note::
+ It is the caller's responsibility to set up refresh and cancel
+ with their correct request argument.
+ The reason for this is that the services that use Extended Operations
+ have rpcs that look something like the following:
+
+ // service.proto
+ service MyLongService {
+ rpc StartLongTask(StartLongTaskRequest) returns (ExtendedOperation) {
+ option (google.cloud.operation_service) = "CustomOperationService";
+ }
+ }
+
+ service CustomOperationService {
+ rpc Get(GetOperationRequest) returns (ExtendedOperation) {
+ option (google.cloud.operation_polling_method) = true;
+ }
+ }
+
+        Any info needed for the poll, e.g. a name, path params, etc.,
+        is held in the request, which the initial client method is in a much
+        better position to construct because the caller made the initial request.
+
+ TL;DR: the caller sets up closures for refresh and cancel that carry
+ the properly configured requests.
+
+ Args:
+ refresh (Callable[Optional[Retry]][type(extended_operation)]): A callable that
+ returns the latest state of the operation.
+ cancel (Callable[][Any]): A callable that tries to cancel the operation
+ on a best effort basis.
+ extended_operation (Any): The initial response of the long running method.
+ See the docstring for ExtendedOperation.__init__ for requirements on
+ the type and fields of extended_operation
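+
+        A minimal sketch of such closures (``client``, ``get_request``,
+        ``cancel_request`` and ``initial_response`` are illustrative names,
+        not part of this API):
+
+        .. code-block:: python
+
+            def refresh(retry=None):
+                return client.get(get_request)
+
+            def cancel():
+                return client.cancel(cancel_request)
+
+            op = ExtendedOperation.make(refresh, cancel, initial_response)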
+ """
+ return cls(extended_operation, refresh, cancel, **kwargs)
diff --git a/Lib/site-packages/google/api_core/future/__init__.py b/Lib/site-packages/google/api_core/future/__init__.py
new file mode 100644
index 0000000..3768b2c
--- /dev/null
+++ b/Lib/site-packages/google/api_core/future/__init__.py
@@ -0,0 +1,19 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Futures for dealing with asynchronous operations."""
+
+from google.api_core.future.base import Future
+
+__all__ = ["Future"]
diff --git a/Lib/site-packages/google/api_core/future/_helpers.py b/Lib/site-packages/google/api_core/future/_helpers.py
new file mode 100644
index 0000000..9e88ca9
--- /dev/null
+++ b/Lib/site-packages/google/api_core/future/_helpers.py
@@ -0,0 +1,39 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Private helpers for futures."""
+
+import logging
+import threading
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def start_daemon_thread(*args, **kwargs):
+ """Starts a thread and marks it as a daemon thread."""
+ thread = threading.Thread(*args, **kwargs)
+ thread.daemon = True
+ thread.start()
+ return thread
+
+
+def safe_invoke_callback(callback, *args, **kwargs):
+ """Invoke a callback, swallowing and logging any exceptions."""
+ # pylint: disable=bare-except
+ # We intentionally want to swallow all exceptions.
+ try:
+ return callback(*args, **kwargs)
+ except Exception:
+ _LOGGER.exception("Error while executing Future callback.")
diff --git a/Lib/site-packages/google/api_core/future/async_future.py b/Lib/site-packages/google/api_core/future/async_future.py
new file mode 100644
index 0000000..325ee9c
--- /dev/null
+++ b/Lib/site-packages/google/api_core/future/async_future.py
@@ -0,0 +1,162 @@
+# Copyright 2020, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AsyncIO implementation of the abstract base Future class."""
+
+import asyncio
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.api_core import retry_async
+from google.api_core.future import base
+
+
+class _OperationNotComplete(Exception):
+ """Private exception used for polling via retry."""
+
+ pass
+
+
+RETRY_PREDICATE = retry.if_exception_type(
+ _OperationNotComplete,
+ exceptions.TooManyRequests,
+ exceptions.InternalServerError,
+ exceptions.BadGateway,
+)
+DEFAULT_RETRY = retry_async.AsyncRetry(predicate=RETRY_PREDICATE)
+
+
+class AsyncFuture(base.Future):
+ """A Future that polls peer service to self-update.
+
+ The :meth:`done` method should be implemented by subclasses. The polling
+ behavior will repeatedly call ``done`` until it returns True.
+
+ .. note::
+
+ Privacy here is intended to prevent the final class from
+ overexposing, not to prevent subclasses from accessing methods.
+
+ Args:
+        retry (google.api_core.retry.Retry): The retry configuration used
+            when polling. This can be used to control how often :meth:`done`
+            is polled. The retry's ``deadline`` is overridden by the
+            ``timeout`` argument to :meth:`result`, if one is given.
+ """
+
+ def __init__(self, retry=DEFAULT_RETRY):
+ super().__init__()
+ self._retry = retry
+ self._future = asyncio.get_event_loop().create_future()
+ self._background_task = None
+
+ async def done(self, retry=DEFAULT_RETRY):
+ """Checks to see if the operation is complete.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+
+ Returns:
+ bool: True if the operation is complete, False otherwise.
+ """
+ # pylint: disable=redundant-returns-doc, missing-raises-doc
+ raise NotImplementedError()
+
+ async def _done_or_raise(self):
+ """Check if the future is done and raise if it's not."""
+ result = await self.done()
+ if not result:
+ raise _OperationNotComplete()
+
+ async def running(self):
+ """True if the operation is currently running."""
+ result = await self.done()
+ return not result
+
+ async def _blocking_poll(self, timeout=None):
+ """Poll and await for the Future to be resolved.
+
+ Args:
+ timeout (int):
+ How long (in seconds) to wait for the operation to complete.
+ If None, wait indefinitely.
+ """
+ if self._future.done():
+ return
+
+ retry_ = self._retry.with_timeout(timeout)
+
+ try:
+ await retry_(self._done_or_raise)()
+ except exceptions.RetryError:
+ raise asyncio.TimeoutError(
+                "Operation did not complete within the designated timeout."
+ )
+
+ async def result(self, timeout=None):
+ """Get the result of the operation.
+
+ Args:
+ timeout (int):
+ How long (in seconds) to wait for the operation to complete.
+ If None, wait indefinitely.
+
+ Returns:
+ google.protobuf.Message: The Operation's result.
+
+ Raises:
+ google.api_core.GoogleAPICallError: If the operation errors or if
+ the timeout is reached before the operation completes.
+ """
+ await self._blocking_poll(timeout=timeout)
+ return self._future.result()
+
+ async def exception(self, timeout=None):
+ """Get the exception from the operation.
+
+ Args:
+ timeout (int): How long to wait for the operation to complete.
+ If None, wait indefinitely.
+
+ Returns:
+ Optional[google.api_core.GoogleAPICallError]: The operation's
+ error.
+ """
+ await self._blocking_poll(timeout=timeout)
+ return self._future.exception()
+
+ def add_done_callback(self, fn):
+ """Add a callback to be executed when the operation is complete.
+
+ If the operation is completed, the callback will be scheduled onto the
+ event loop. Otherwise, the callback will be stored and invoked when the
+ future is done.
+
+ Args:
+ fn (Callable[Future]): The callback to execute when the operation
+ is complete.
+ """
+ if self._background_task is None:
+ self._background_task = asyncio.get_event_loop().create_task(
+ self._blocking_poll()
+ )
+ self._future.add_done_callback(fn)
+
+ def set_result(self, result):
+ """Set the Future's result."""
+ self._future.set_result(result)
+
+ def set_exception(self, exception):
+ """Set the Future's exception."""
+ self._future.set_exception(exception)
diff --git a/Lib/site-packages/google/api_core/future/base.py b/Lib/site-packages/google/api_core/future/base.py
new file mode 100644
index 0000000..f300586
--- /dev/null
+++ b/Lib/site-packages/google/api_core/future/base.py
@@ -0,0 +1,64 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstract and helper bases for Future implementations."""
+
+import abc
+
+
+class Future(object, metaclass=abc.ABCMeta):
+ # pylint: disable=missing-docstring
+ # We inherit the interfaces here from concurrent.futures.
+
+ """Future interface.
+
+ This interface is based on :class:`concurrent.futures.Future`.
+ """
+
+ @abc.abstractmethod
+ def cancel(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def cancelled(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def running(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def done(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def result(self, timeout=None):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def exception(self, timeout=None):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_done_callback(self, fn):
+ # pylint: disable=invalid-name
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def set_result(self, result):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def set_exception(self, exception):
+ raise NotImplementedError()
diff --git a/Lib/site-packages/google/api_core/future/polling.py b/Lib/site-packages/google/api_core/future/polling.py
new file mode 100644
index 0000000..f1e2a18
--- /dev/null
+++ b/Lib/site-packages/google/api_core/future/polling.py
@@ -0,0 +1,323 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstract and helper bases for Future implementations."""
+
+import abc
+import concurrent.futures
+
+from google.api_core import exceptions
+from google.api_core import retry as retries
+from google.api_core.future import _helpers
+from google.api_core.future import base
+
+
+class _OperationNotComplete(Exception):
+ """Private exception used for polling via retry."""
+
+ pass
+
+
+# DEPRECATED as it conflates RPC retry and polling concepts into one.
+# Use POLLING_PREDICATE instead to configure polling.
+RETRY_PREDICATE = retries.if_exception_type(
+ _OperationNotComplete,
+ exceptions.TooManyRequests,
+ exceptions.InternalServerError,
+ exceptions.BadGateway,
+ exceptions.ServiceUnavailable,
+)
+
+# DEPRECATED: use DEFAULT_POLLING to configure LRO polling logic. Construct
+# Retry object using its default values as a baseline for any custom retry logic
+# (not to be confused with polling logic).
+DEFAULT_RETRY = retries.Retry(predicate=RETRY_PREDICATE)
+
+# POLLING_PREDICATE is supposed to poll only on _OperationNotComplete.
+# Any RPC-specific errors (like ServiceUnavailable) will be handled
+# by retry logic (not to be confused with polling logic) which is triggered for
+# every polling RPC independently of polling logic but within its context.
+POLLING_PREDICATE = retries.if_exception_type(
+ _OperationNotComplete,
+)
+
+# Default polling configuration
+DEFAULT_POLLING = retries.Retry(
+ predicate=POLLING_PREDICATE,
+ initial=1.0, # seconds
+ maximum=20.0, # seconds
+ multiplier=1.5,
+ timeout=900, # seconds
+)
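+
+# For example, a caller could derive a longer polling window from the default
+# (an illustrative sketch; ``future`` is assumed to be a PollingFuture):
+#
+#   custom_polling = DEFAULT_POLLING.with_timeout(1800)
+#   future.result(polling=custom_polling)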
+
+
+class PollingFuture(base.Future):
+ """A Future that needs to poll some service to check its status.
+
+ The :meth:`done` method should be implemented by subclasses. The polling
+ behavior will repeatedly call ``done`` until it returns True.
+
+ The actual polling logic is encapsulated in :meth:`result` method. See
+ documentation for that method for details on how polling works.
+
+ .. note::
+
+ Privacy here is intended to prevent the final class from
+ overexposing, not to prevent subclasses from accessing methods.
+
+ Args:
+ polling (google.api_core.retry.Retry): The configuration used for polling.
+ This parameter controls how often :meth:`done` is polled. If the
+ ``timeout`` argument is specified in :meth:`result` method it will
+ override the ``polling.timeout`` property.
+ retry (google.api_core.retry.Retry): DEPRECATED use ``polling`` instead.
+ If set, it will override ``polling`` parameter for backward
+ compatibility.
+ """
+
+ _DEFAULT_VALUE = object()
+
+ def __init__(self, polling=DEFAULT_POLLING, **kwargs):
+ super(PollingFuture, self).__init__()
+ self._polling = kwargs.get("retry", polling)
+ self._result = None
+ self._exception = None
+ self._result_set = False
+ """bool: Set to True when the result has been set via set_result or
+ set_exception."""
+ self._polling_thread = None
+ self._done_callbacks = []
+
+ @abc.abstractmethod
+ def done(self, retry=None):
+ """Checks to see if the operation is complete.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the
+                polling RPC (not to be confused with the polling configuration;
+                see the documentation for :meth:`result` for details).
+
+ Returns:
+ bool: True if the operation is complete, False otherwise.
+ """
+ # pylint: disable=redundant-returns-doc, missing-raises-doc
+ raise NotImplementedError()
+
+ def _done_or_raise(self, retry=None):
+ """Check if the future is done and raise if it's not."""
+ if not self.done(retry=retry):
+ raise _OperationNotComplete()
+
+ def running(self):
+ """True if the operation is currently running."""
+ return not self.done()
+
+ def _blocking_poll(self, timeout=_DEFAULT_VALUE, retry=None, polling=None):
+ """Poll and wait for the Future to be resolved."""
+
+ if self._result_set:
+ return
+
+ polling = polling or self._polling
+ if timeout is not PollingFuture._DEFAULT_VALUE:
+ polling = polling.with_timeout(timeout)
+
+ try:
+ polling(self._done_or_raise)(retry=retry)
+ except exceptions.RetryError:
+ raise concurrent.futures.TimeoutError(
+ f"Operation did not complete within the designated timeout of "
+ f"{polling.timeout} seconds."
+ )
+
+ def result(self, timeout=_DEFAULT_VALUE, retry=None, polling=None):
+ """Get the result of the operation.
+
+ This method will poll for operation status periodically, blocking if
+ necessary. If you just want to make sure that this method does not block
+ for more than X seconds and you do not care about the nitty-gritty of
+ how this method operates, just call it with ``result(timeout=X)``. The
+ other parameters are for advanced use only.
+
+ Every call to this method is controlled by the following three
+ parameters, each of which has a specific, distinct role, even though all three
+ may look very similar: ``timeout``, ``retry`` and ``polling``. In most
+ cases users do not need to specify any custom values for any of these
+ parameters and may simply rely on default ones instead.
+
+ If you choose to specify custom parameters, please make sure you've
+ read the documentation below carefully.
+
+ First, please check :class:`google.api_core.retry.Retry`
+ class documentation for the proper definition of timeout and deadline
+        terms and for the definition of the three different types of timeouts.
+        This class operates in terms of Retry Timeout and Polling Timeout. It
+        does not allow customizing the RPC timeout; the user is expected to rely
+        on the default behavior for it.
+
+ The roles of each argument of this method are as follows:
+
+ ``timeout`` (int): (Optional) The Polling Timeout as defined in
+ :class:`google.api_core.retry.Retry`. If the operation does not complete
+ within this timeout an exception will be thrown. This parameter affects
+ neither Retry Timeout nor RPC Timeout.
+
+ ``retry`` (google.api_core.retry.Retry): (Optional) How to retry the
+ polling RPC. The ``retry.timeout`` property of this parameter is the
+ Retry Timeout as defined in :class:`google.api_core.retry.Retry`.
+ This parameter defines ONLY how the polling RPC call is retried
+ (i.e. what to do if the RPC we used for polling returned an error). It
+ does NOT define how the polling is done (i.e. how frequently and for
+ how long to call the polling RPC); use the ``polling`` parameter for that.
+        If a polling RPC throws an error and retrying it fails, the whole
+ future fails with the corresponding exception. If you want to tune which
+ server response error codes are not fatal for operation polling, use this
+ parameter to control that (``retry.predicate`` in particular).
+
+ ``polling`` (google.api_core.retry.Retry): (Optional) How often and
+ for how long to call the polling RPC periodically (i.e. what to do if
+        a polling RPC returned successfully but its result indicates
+        that the long-running operation is not completed yet, so we need to
+        check it again at some point in the future). This parameter does NOT
+        define how to retry each individual polling RPC in case of an error; use
+        the ``retry`` parameter for that. The ``polling.timeout`` of this
+        parameter is the Polling Timeout as defined in
+ :class:`google.api_core.retry.Retry`.
+
+ For each of the arguments, there are also default values in place, which
+ will be used if a user does not specify their own. The default values
+ for the three parameters are not to be confused with the default values
+ for the corresponding arguments in this method (those serve as "not set"
+ markers for the resolution logic).
+
+        If ``timeout`` is provided (i.e. ``timeout is not _DEFAULT_VALUE``; note
+        that a ``None`` value means "infinite timeout"), it will be used to
+        control the actual Polling Timeout. Otherwise, the ``polling.timeout``
+        value will be used instead (see below for how the ``polling`` config
+        itself gets resolved). In other words, this parameter effectively
+        overrides the ``polling.timeout`` value if specified. This is done to
+        preserve backward compatibility.
+
+ If ``retry`` is provided (i.e. ``retry is not None``) it will be used to
+ control retry behavior for the polling RPC and the ``retry.timeout``
+ will determine the Retry Timeout. If not provided, the
+ polling RPC will be called with whichever default retry config was
+ specified for the polling RPC at the moment of the construction of the
+ polling RPC's client. For example, if the polling RPC is
+ ``operations_client.get_operation()``, the ``retry`` parameter will be
+ controlling its retry behavior (not polling behavior) and, if not
+ specified, that specific method (``operations_client.get_operation()``)
+ will be retried according to the default retry config provided during
+ creation of ``operations_client`` client instead. This argument exists
+ mainly for backward compatibility; users are very unlikely to ever need
+ to set this parameter explicitly.
+
+ If ``polling`` is provided (i.e. ``polling is not None``), it will be used
+ to control the overall polling behavior and ``polling.timeout`` will
+ control Polling Timeout unless it is overridden by ``timeout`` parameter
+        as described above. If not provided, the ``polling`` parameter specified
+ during construction of this future (the ``polling`` argument in the
+ constructor) will be used instead. Note: since the ``timeout`` argument may
+ override ``polling.timeout`` value, this parameter should be viewed as
+ coupled with the ``timeout`` parameter as described above.
+
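+        Example (an illustrative sketch; ``operation`` stands for any
+        concrete ``PollingFuture`` subclass instance)::
+
+            # Wait at most two minutes for the operation, relying on the
+            # default retry and polling configurations.
+            response = operation.result(timeout=120)
+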
+ Args:
+ timeout (int): (Optional) How long (in seconds) to wait for the
+ operation to complete. If None, wait indefinitely.
+ retry (google.api_core.retry.Retry): (Optional) How to retry the
+ polling RPC. This defines ONLY how the polling RPC call is
+ retried (i.e. what to do if the RPC we used for polling returned
+ an error). It does NOT define how the polling is done (i.e. how
+ frequently and for how long to call the polling RPC).
+ polling (google.api_core.retry.Retry): (Optional) How often and
+ for how long to call polling RPC periodically. This parameter
+ does NOT define how to retry each individual polling RPC call
+ (use the ``retry`` parameter for that).
+
+ Returns:
+ google.protobuf.Message: The Operation's result.
+
+ Raises:
+ google.api_core.GoogleAPICallError: If the operation errors or if
+ the timeout is reached before the operation completes.
+ """
+
+ self._blocking_poll(timeout=timeout, retry=retry, polling=polling)
+
+ if self._exception is not None:
+ # pylint: disable=raising-bad-type
+ # Pylint doesn't recognize that this is valid in this case.
+ raise self._exception
+
+ return self._result
+
+ def exception(self, timeout=_DEFAULT_VALUE):
+ """Get the exception from the operation, blocking if necessary.
+
+ See the documentation for the :meth:`result` method for details on how
+ this method operates, as both ``result`` and this method rely on the
+ exact same polling logic. The only difference is that this method does
+ not accept ``retry`` and ``polling`` arguments but relies on the default ones
+ instead.
+
+ Args:
+ timeout (int): How long to wait for the operation to complete.
+ If None, wait indefinitely.
+
+ Returns:
+ Optional[google.api_core.GoogleAPICallError]: The operation's
+ error.
+ """
+ self._blocking_poll(timeout=timeout)
+ return self._exception
+
+ def add_done_callback(self, fn):
+ """Add a callback to be executed when the operation is complete.
+
+ If the operation is not already complete, this will start a helper
+ thread to poll for the status of the operation in the background.
+
+ Args:
+ fn (Callable[Future]): The callback to execute when the operation
+ is complete.
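+
+        Example (a minimal sketch; ``operation`` is any ``PollingFuture``
+        instance)::
+
+            def on_done(future):
+                print(future.result())
+
+            operation.add_done_callback(on_done)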
+ """
+ if self._result_set:
+ _helpers.safe_invoke_callback(fn, self)
+ return
+
+ self._done_callbacks.append(fn)
+
+ if self._polling_thread is None:
+ # The polling thread will exit on its own as soon as the operation
+ # is done.
+ self._polling_thread = _helpers.start_daemon_thread(
+ target=self._blocking_poll
+ )
+
+ def _invoke_callbacks(self, *args, **kwargs):
+ """Invoke all done callbacks."""
+ for callback in self._done_callbacks:
+ _helpers.safe_invoke_callback(callback, *args, **kwargs)
+
+ def set_result(self, result):
+ """Set the Future's result."""
+ self._result = result
+ self._result_set = True
+ self._invoke_callbacks(self)
+
+ def set_exception(self, exception):
+ """Set the Future's exception."""
+ self._exception = exception
+ self._result_set = True
+ self._invoke_callbacks(self)
diff --git a/Lib/site-packages/google/api_core/gapic_v1/__init__.py b/Lib/site-packages/google/api_core/gapic_v1/__init__.py
new file mode 100644
index 0000000..e5b7ad3
--- /dev/null
+++ b/Lib/site-packages/google/api_core/gapic_v1/__init__.py
@@ -0,0 +1,29 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.api_core.gapic_v1 import client_info
+from google.api_core.gapic_v1 import config
+from google.api_core.gapic_v1 import config_async
+from google.api_core.gapic_v1 import method
+from google.api_core.gapic_v1 import method_async
+from google.api_core.gapic_v1 import routing_header
+
+__all__ = [
+ "client_info",
+ "config",
+ "config_async",
+ "method",
+ "method_async",
+ "routing_header",
+]
diff --git a/Lib/site-packages/google/api_core/gapic_v1/client_info.py b/Lib/site-packages/google/api_core/gapic_v1/client_info.py
new file mode 100644
index 0000000..2de1be7
--- /dev/null
+++ b/Lib/site-packages/google/api_core/gapic_v1/client_info.py
@@ -0,0 +1,55 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for providing client information.
+
+Client information is used to send information about the calling client,
+such as the library and Python version, to API services.
+"""
+
+from google.api_core import client_info
+
+
+METRICS_METADATA_KEY = "x-goog-api-client"
+
+
+class ClientInfo(client_info.ClientInfo):
+ """Client information used to generate a user-agent for API calls.
+
+ This user-agent information is sent along with API calls to allow the
+ receiving service to do analytics on which versions of Python and Google
+ libraries are being used.
+
+ Args:
+ python_version (str): The Python interpreter version, for example,
+ ``'3.9.6'``.
+ grpc_version (Optional[str]): The gRPC library version.
+ api_core_version (str): The google-api-core library version.
+ gapic_version (Optional[str]): The version of gapic-generated client
+ library, if the library was generated by gapic.
+ client_library_version (Optional[str]): The version of the client
+ library, generally used if the client library was not generated
+ by gapic or if additional functionality was built on top of
+ a gapic client library.
+ user_agent (Optional[str]): Prefix to the user agent header. This is
+ used to supply information such as application name or partner tool.
+ Recommended format: ``application-or-tool-ID/major.minor.version``.
+ """
+
+ def to_grpc_metadata(self):
+ """Returns the gRPC metadata for this client info."""
+ return (METRICS_METADATA_KEY, self.to_user_agent())
+
+
+DEFAULT_CLIENT_INFO = ClientInfo()
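+
+# A usage sketch (the version string below is made up; the exact user-agent
+# value depends on the installed library versions):
+#
+#     info = ClientInfo(gapic_version="1.2.3")
+#     metadata = [info.to_grpc_metadata()]
+#     # -> [("x-goog-api-client", "gl-python/... gapic/1.2.3 ...")]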
diff --git a/Lib/site-packages/google/api_core/gapic_v1/config.py b/Lib/site-packages/google/api_core/gapic_v1/config.py
new file mode 100644
index 0000000..36b50d9
--- /dev/null
+++ b/Lib/site-packages/google/api_core/gapic_v1/config.py
@@ -0,0 +1,175 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for loading gapic configuration data.
+
+The Google API generator creates supplementary configuration for each RPC
+method to tell the client library how to deal with retries and timeouts.
+"""
+
+import collections
+
+import grpc
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.api_core import timeout
+
+
+_MILLIS_PER_SECOND = 1000.0
+
+
+def _exception_class_for_grpc_status_name(name):
+ """Returns the Google API exception class for a gRPC error code name.
+
+ DEPRECATED: use ``exceptions.exception_class_for_grpc_status`` method
+ directly instead.
+
+ Args:
+ name (str): The name of the gRPC status code, for example,
+ ``UNAVAILABLE``.
+
+ Returns:
+ :func:`type`: The appropriate subclass of
+ :class:`google.api_core.exceptions.GoogleAPICallError`.
+ """
+ return exceptions.exception_class_for_grpc_status(getattr(grpc.StatusCode, name))
+
+
+def _retry_from_retry_config(retry_params, retry_codes, retry_impl=retry.Retry):
+ """Creates a Retry object given a gapic retry configuration.
+
+ DEPRECATED: instantiate retry and timeout classes directly instead.
+
+ Args:
+ retry_params (dict): The retry parameter values, for example::
+
+ {
+ "initial_retry_delay_millis": 1000,
+ "retry_delay_multiplier": 2.5,
+ "max_retry_delay_millis": 120000,
+ "initial_rpc_timeout_millis": 120000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 120000,
+ "total_timeout_millis": 600000
+ }
+
+ retry_codes (sequence[str]): The list of retryable gRPC error code
+ names.
+
+ Returns:
+ google.api_core.retry.Retry: The default retry object for the method.
+ """
+ exception_classes = [
+ _exception_class_for_grpc_status_name(code) for code in retry_codes
+ ]
+ return retry_impl(
+ retry.if_exception_type(*exception_classes),
+ initial=(retry_params["initial_retry_delay_millis"] / _MILLIS_PER_SECOND),
+ maximum=(retry_params["max_retry_delay_millis"] / _MILLIS_PER_SECOND),
+ multiplier=retry_params["retry_delay_multiplier"],
+ deadline=retry_params["total_timeout_millis"] / _MILLIS_PER_SECOND,
+ )
+
+
+def _timeout_from_retry_config(retry_params):
+ """Creates a ExponentialTimeout object given a gapic retry configuration.
+
+ DEPRECATED: instantiate retry and timeout classes directly instead.
+
+ Args:
+ retry_params (dict): The retry parameter values, for example::
+
+ {
+ "initial_retry_delay_millis": 1000,
+ "retry_delay_multiplier": 2.5,
+ "max_retry_delay_millis": 120000,
+ "initial_rpc_timeout_millis": 120000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 120000,
+ "total_timeout_millis": 600000
+ }
+
+ Returns:
+        google.api_core.timeout.ExponentialTimeout: The default timeout object
+        for the method.
+ """
+ return timeout.ExponentialTimeout(
+ initial=(retry_params["initial_rpc_timeout_millis"] / _MILLIS_PER_SECOND),
+ maximum=(retry_params["max_rpc_timeout_millis"] / _MILLIS_PER_SECOND),
+ multiplier=retry_params["rpc_timeout_multiplier"],
+ deadline=(retry_params["total_timeout_millis"] / _MILLIS_PER_SECOND),
+ )
+
+
+MethodConfig = collections.namedtuple("MethodConfig", ["retry", "timeout"])
+
+
+def parse_method_configs(interface_config, retry_impl=retry.Retry):
+ """Creates default retry and timeout objects for each method in a gapic
+ interface config.
+
+ DEPRECATED: instantiate retry and timeout classes directly instead.
+
+ Args:
+ interface_config (Mapping): The interface config section of the full
+ gapic library config. For example, If the full configuration has
+ an interface named ``google.example.v1.ExampleService`` you would
+ pass in just that interface's configuration, for example
+ ``gapic_config['interfaces']['google.example.v1.ExampleService']``.
+ retry_impl (Callable): The constructor that creates a retry decorator
+ that will be applied to the method based on method configs.
+
+ Returns:
+ Mapping[str, MethodConfig]: A mapping of RPC method names to their
+ configuration.
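+
+    Example (a sketch; the method and config names are illustrative)::
+
+        interface_config = {
+            "retry_codes": {"idempotent": ["UNAVAILABLE"]},
+            "retry_params": {"default": {
+                "initial_retry_delay_millis": 1000,
+                "retry_delay_multiplier": 2.5,
+                "max_retry_delay_millis": 120000,
+                "initial_rpc_timeout_millis": 120000,
+                "rpc_timeout_multiplier": 1.0,
+                "max_rpc_timeout_millis": 120000,
+                "total_timeout_millis": 600000,
+            }},
+            "methods": {"GetTopic": {
+                "retry_codes_name": "idempotent",
+                "retry_params_name": "default",
+            }},
+        }
+        method_configs = parse_method_configs(interface_config)
+        default_retry = method_configs["GetTopic"].retry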
+ """
+ # Grab all the retry codes
+ retry_codes_map = {
+ name: retry_codes
+ for name, retry_codes in interface_config.get("retry_codes", {}).items()
+ }
+
+ # Grab all of the retry params
+ retry_params_map = {
+ name: retry_params
+ for name, retry_params in interface_config.get("retry_params", {}).items()
+ }
+
+ # Iterate through all the API methods and create a flat MethodConfig
+ # instance for each one.
+ method_configs = {}
+
+ for method_name, method_params in interface_config.get("methods", {}).items():
+ retry_params_name = method_params.get("retry_params_name")
+
+ if retry_params_name is not None:
+ retry_params = retry_params_map[retry_params_name]
+ retry_ = _retry_from_retry_config(
+ retry_params,
+ retry_codes_map[method_params["retry_codes_name"]],
+ retry_impl,
+ )
+ timeout_ = _timeout_from_retry_config(retry_params)
+
+ # No retry config, so this is a non-retryable method.
+ else:
+ retry_ = None
+ timeout_ = timeout.ConstantTimeout(
+ method_params["timeout_millis"] / _MILLIS_PER_SECOND
+ )
+
+ method_configs[method_name] = MethodConfig(retry=retry_, timeout=timeout_)
+
+ return method_configs
diff --git a/Lib/site-packages/google/api_core/gapic_v1/config_async.py b/Lib/site-packages/google/api_core/gapic_v1/config_async.py
new file mode 100644
index 0000000..13d6a48
--- /dev/null
+++ b/Lib/site-packages/google/api_core/gapic_v1/config_async.py
@@ -0,0 +1,42 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""AsyncIO helpers for loading gapic configuration data.
+
+The Google API generator creates supplementary configuration for each RPC
+method to tell the client library how to deal with retries and timeouts.
+"""
+
+from google.api_core import retry_async
+from google.api_core.gapic_v1 import config
+from google.api_core.gapic_v1.config import MethodConfig # noqa: F401
+
+
+def parse_method_configs(interface_config):
+ """Creates default retry and timeout objects for each method in a gapic
+ interface config with AsyncIO semantics.
+
+ Args:
+ interface_config (Mapping): The interface config section of the full
+            gapic library config. For example, if the full configuration has
+ an interface named ``google.example.v1.ExampleService`` you would
+ pass in just that interface's configuration, for example
+ ``gapic_config['interfaces']['google.example.v1.ExampleService']``.
+
+ Returns:
+ Mapping[str, MethodConfig]: A mapping of RPC method names to their
+ configuration.
+ """
+ return config.parse_method_configs(
+ interface_config, retry_impl=retry_async.AsyncRetry
+ )
diff --git a/Lib/site-packages/google/api_core/gapic_v1/method.py b/Lib/site-packages/google/api_core/gapic_v1/method.py
new file mode 100644
index 0000000..0f14ea9
--- /dev/null
+++ b/Lib/site-packages/google/api_core/gapic_v1/method.py
@@ -0,0 +1,253 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for wrapping low-level gRPC methods with common functionality.
+
+This is used by gapic clients to provide common error mapping, retry, timeout,
+compression, pagination, and long-running operations to gRPC methods.
+"""
+
+import enum
+import functools
+
+from google.api_core import grpc_helpers
+from google.api_core.gapic_v1 import client_info
+from google.api_core.timeout import TimeToDeadlineTimeout
+
+USE_DEFAULT_METADATA = object()
+
+
+class _MethodDefault(enum.Enum):
+ # Uses enum so that pytype/mypy knows that this is the only possible value.
+ # https://stackoverflow.com/a/60605919/101923
+ #
+ # Literal[_DEFAULT_VALUE] is an alternative, but only added in Python 3.8.
+ # https://docs.python.org/3/library/typing.html#typing.Literal
+ _DEFAULT_VALUE = object()
+
+
+DEFAULT = _MethodDefault._DEFAULT_VALUE
+"""Sentinel value indicating that a retry, timeout, or compression argument was unspecified,
+so the default should be used."""
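+
+# An illustrative signature (a sketch) showing how the sentinel lets a wrapped
+# method distinguish an explicit ``retry=None`` from "not specified":
+#
+#     def get_topic(request, *, retry=DEFAULT, timeout=DEFAULT):
+#         ...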
+
+
+def _is_not_none_or_false(value):
+ return value is not None and value is not False
+
+
+def _apply_decorators(func, decorators):
+ """Apply a list of decorators to a given function.
+
+ ``decorators`` may contain items that are ``None`` or ``False`` which will
+ be ignored.
+ """
+ filtered_decorators = filter(_is_not_none_or_false, reversed(decorators))
+
+ for decorator in filtered_decorators:
+ func = decorator(func)
+
+ return func
+
+
+class _GapicCallable(object):
+ """Callable that applies retry, timeout, and metadata logic.
+
+ Args:
+ target (Callable): The low-level RPC method.
+ retry (google.api_core.retry.Retry): The default retry for the
+            callable. If ``None``, this callable will not retry by default.
+ timeout (google.api_core.timeout.Timeout): The default timeout for the
+ callable (i.e. duration of time within which an RPC must terminate
+ after its start, not to be confused with deadline). If ``None``,
+ this callable will not specify a timeout argument to the low-level
+ RPC method.
+ compression (grpc.Compression): The default compression for the callable.
+ If ``None``, this callable will not specify a compression argument
+ to the low-level RPC method.
+ metadata (Sequence[Tuple[str, str]]): Additional metadata that is
+ provided to the RPC method on every invocation. This is merged with
+ any metadata specified during invocation. If ``None``, no
+ additional metadata will be passed to the RPC method.
+ """
+
+ def __init__(
+ self,
+ target,
+ retry,
+ timeout,
+ compression,
+ metadata=None,
+ ):
+ self._target = target
+ self._retry = retry
+ self._timeout = timeout
+ self._compression = compression
+ self._metadata = metadata
+
+ def __call__(
+ self, *args, timeout=DEFAULT, retry=DEFAULT, compression=DEFAULT, **kwargs
+ ):
+ """Invoke the low-level RPC with retry, timeout, compression, and metadata."""
+
+ if retry is DEFAULT:
+ retry = self._retry
+
+ if timeout is DEFAULT:
+ timeout = self._timeout
+
+ if compression is DEFAULT:
+ compression = self._compression
+
+ if isinstance(timeout, (int, float)):
+ timeout = TimeToDeadlineTimeout(timeout=timeout)
+
+ # Apply all applicable decorators.
+ wrapped_func = _apply_decorators(self._target, [retry, timeout])
+
+ # Add the user agent metadata to the call.
+ if self._metadata is not None:
+ metadata = kwargs.get("metadata", [])
+ # Due to the nature of invocation, None should be treated the same
+ # as not specified.
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ metadata.extend(self._metadata)
+ kwargs["metadata"] = metadata
+ if self._compression is not None:
+ kwargs["compression"] = compression
+
+ return wrapped_func(*args, **kwargs)
+
+
+def wrap_method(
+ func,
+ default_retry=None,
+ default_timeout=None,
+ default_compression=None,
+ client_info=client_info.DEFAULT_CLIENT_INFO,
+ *,
+ with_call=False,
+):
+ """Wrap an RPC method with common behavior.
+
+ This applies common error wrapping, retry, timeout, and compression behavior to a function.
+ The wrapped function will take optional ``retry``, ``timeout``, and ``compression``
+ arguments.
+
+ For example::
+
+        import google.api_core.gapic_v1.method
+        from google.api_core import exceptions
+        from google.api_core import retry
+        from google.api_core import timeout
+        from grpc import Compression
+
+ # The original RPC method.
+ def get_topic(name, timeout=None):
+ request = publisher_v2.GetTopicRequest(name=name)
+ return publisher_stub.GetTopic(request, timeout=timeout)
+
+ default_retry = retry.Retry(deadline=60)
+ default_timeout = timeout.Timeout(deadline=60)
+ default_compression = Compression.NoCompression
+ wrapped_get_topic = google.api_core.gapic_v1.method.wrap_method(
+ get_topic, default_retry)
+
+ # Execute get_topic with default retry and timeout:
+ response = wrapped_get_topic()
+
+        # Execute get_topic without doing any retrying but with the default
+ # timeout:
+ response = wrapped_get_topic(retry=None)
+
+ # Execute get_topic but only retry on 5xx errors:
+ my_retry = retry.Retry(retry.if_exception_type(
+ exceptions.InternalServerError))
+ response = wrapped_get_topic(retry=my_retry)
+
+ The way this works is by late-wrapping the given function with the retry
+ and timeout decorators. Essentially, when ``wrapped_get_topic()`` is
+ called:
+
+ * ``get_topic()`` is first wrapped with the ``timeout`` into
+ ``get_topic_with_timeout``.
+ * ``get_topic_with_timeout`` is wrapped with the ``retry`` into
+ ``get_topic_with_timeout_and_retry()``.
+ * The final ``get_topic_with_timeout_and_retry`` is called passing through
+ the ``args`` and ``kwargs``.
+
+ The callstack is therefore::
+
+ method.__call__() ->
+ Retry.__call__() ->
+ Timeout.__call__() ->
+ wrap_errors() ->
+ get_topic()
+
+ Note that if ``timeout`` or ``retry`` is ``None``, then they are not
+ applied to the function. For example,
+ ``wrapped_get_topic(timeout=None, retry=None)`` is more or less
+ equivalent to just calling ``get_topic`` but with error re-mapping.
+
+ Args:
+ func (Callable[Any]): The function to wrap. It should accept an
+ optional ``timeout`` argument. If ``metadata`` is not ``None``, it
+ should accept a ``metadata`` argument.
+ default_retry (Optional[google.api_core.Retry]): The default retry
+ strategy. If ``None``, the method will not retry by default.
+ default_timeout (Optional[google.api_core.Timeout]): The default
+ timeout strategy. Can also be specified as an int or float. If
+ ``None``, the method will not have timeout specified by default.
+ default_compression (Optional[grpc.Compression]): The default
+ grpc.Compression. If ``None``, the method will not have
+ compression specified by default.
+ client_info
+ (Optional[google.api_core.gapic_v1.client_info.ClientInfo]):
+ Client information used to create a user-agent string that's
+ passed as gRPC metadata to the method. If unspecified, then
+ a sane default will be used. If ``None``, then no user agent
+ metadata will be provided to the RPC method.
+ with_call (bool): If True, wrapped grpc.UnaryUnaryMulticallables will
+ return a tuple of (response, grpc.Call) instead of just the response.
+ This is useful for extracting trailing metadata from unary calls.
+ Defaults to False.
+
+ Returns:
+        Callable: A new callable that takes optional ``retry``, ``timeout``,
+        and ``compression`` arguments and applies the common error mapping,
+        retry, timeout, compression, and metadata behavior to the low-level
+        RPC method.
+ """
+ if with_call:
+ try:
+ func = func.with_call
+ except AttributeError as exc:
+ raise ValueError(
+ "with_call=True is only supported for unary calls."
+ ) from exc
+ func = grpc_helpers.wrap_errors(func)
+ if client_info is not None:
+ user_agent_metadata = [client_info.to_grpc_metadata()]
+ else:
+ user_agent_metadata = None
+
+ return functools.wraps(func)(
+ _GapicCallable(
+ func,
+ default_retry,
+ default_timeout,
+ default_compression,
+ metadata=user_agent_metadata,
+ )
+ )
diff --git a/Lib/site-packages/google/api_core/gapic_v1/method_async.py b/Lib/site-packages/google/api_core/gapic_v1/method_async.py
new file mode 100644
index 0000000..2488075
--- /dev/null
+++ b/Lib/site-packages/google/api_core/gapic_v1/method_async.py
@@ -0,0 +1,55 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""AsyncIO helpers for wrapping gRPC methods with common functionality.
+
+This is used by gapic clients to provide common error mapping, retry, timeout,
+compression, pagination, and long-running operations to gRPC methods.
+"""
+
+import functools
+
+from google.api_core import grpc_helpers_async
+from google.api_core.gapic_v1 import client_info
+from google.api_core.gapic_v1.method import _GapicCallable
+from google.api_core.gapic_v1.method import DEFAULT # noqa: F401
+from google.api_core.gapic_v1.method import USE_DEFAULT_METADATA # noqa: F401
+
+
+def wrap_method(
+ func,
+ default_retry=None,
+ default_timeout=None,
+ default_compression=None,
+ client_info=client_info.DEFAULT_CLIENT_INFO,
+):
+ """Wrap an async RPC method with common behavior.
+
+ Returns:
+ Callable: A new callable that takes optional ``retry``, ``timeout``,
+ and ``compression`` arguments and applies the common error mapping,
+ retry, timeout, metadata, and compression behavior to the low-level RPC method.
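+
+    Example (a sketch; ``stub.GetTopic`` stands for a hypothetical async gRPC
+    callable)::
+
+        wrapped = wrap_method(stub.GetTopic, default_timeout=60)
+        response = await wrapped(request)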
+ """
+ func = grpc_helpers_async.wrap_errors(func)
+
+ metadata = [client_info.to_grpc_metadata()] if client_info is not None else None
+
+ return functools.wraps(func)(
+ _GapicCallable(
+ func,
+ default_retry,
+ default_timeout,
+ default_compression,
+ metadata=metadata,
+ )
+ )
diff --git a/Lib/site-packages/google/api_core/gapic_v1/routing_header.py b/Lib/site-packages/google/api_core/gapic_v1/routing_header.py
new file mode 100644
index 0000000..c0c6f64
--- /dev/null
+++ b/Lib/site-packages/google/api_core/gapic_v1/routing_header.py
@@ -0,0 +1,87 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for constructing routing headers.
+
+These headers are used by Google infrastructure to determine how to route
+requests, especially for services that are regional.
+
+Generally, these headers are specified as gRPC metadata.
+"""
+
+import functools
+from enum import Enum
+from urllib.parse import urlencode
+
+ROUTING_METADATA_KEY = "x-goog-request-params"
+# This is the value for the `maxsize` argument of @functools.lru_cache
+# https://docs.python.org/3/library/functools.html#functools.lru_cache
+# This represents the number of recent function calls to store.
+ROUTING_PARAM_CACHE_SIZE = 32
+
+
+def to_routing_header(params, qualified_enums=True):
+ """Returns a routing header string for the given request parameters.
+
+ Args:
+ params (Mapping[str, str | bytes | Enum]): A dictionary containing the request
+ parameters used for routing.
+ qualified_enums (bool): Whether to represent enum values
+ as their type-qualified symbol names instead of as their
+ unqualified symbol names.
+
+ Returns:
+ str: The routing header string.
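+
+    Example (illustrative values)::
+
+        to_routing_header({"table_name": "projects/p/instances/i"})
+        # -> 'table_name=projects/p/instances/i'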
+ """
+ tuples = params.items() if isinstance(params, dict) else params
+ if not qualified_enums:
+ tuples = [(x[0], x[1].name) if isinstance(x[1], Enum) else x for x in tuples]
+ return "&".join([_urlencode_param(*t) for t in tuples])
+
+
+def to_grpc_metadata(params, qualified_enums=True):
+ """Returns the gRPC metadata containing the routing headers for the given
+ request parameters.
+
+ Args:
+ params (Mapping[str, str | bytes | Enum]): A dictionary containing the request
+ parameters used for routing.
+ qualified_enums (bool): Whether to represent enum values
+ as their type-qualified symbol names instead of as their
+ unqualified symbol names.
+
+ Returns:
+ Tuple(str, str): The gRPC metadata containing the routing header key
+ and value.
+ """
+ return (ROUTING_METADATA_KEY, to_routing_header(params, qualified_enums))
+
+
+# use caching to avoid repeated computation
+@functools.lru_cache(maxsize=ROUTING_PARAM_CACHE_SIZE)
+def _urlencode_param(key, value):
+ """Cacheable wrapper over urlencode
+
+ Args:
+ key (str): The key of the parameter to encode.
+ value (str | bytes | Enum): The value of the parameter to encode.
+
+ Returns:
+ str: The encoded parameter.
+ """
+ return urlencode(
+ {key: value},
+ # Per Google API policy (go/api-url-encoding), / is not encoded.
+ safe="/",
+ )
diff --git a/Lib/site-packages/google/api_core/general_helpers.py b/Lib/site-packages/google/api_core/general_helpers.py
new file mode 100644
index 0000000..a6af45b
--- /dev/null
+++ b/Lib/site-packages/google/api_core/general_helpers.py
@@ -0,0 +1,16 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This import for backward compatibility only.
+from functools import wraps # noqa: F401 pragma: NO COVER
diff --git a/Lib/site-packages/google/api_core/grpc_helpers.py b/Lib/site-packages/google/api_core/grpc_helpers.py
new file mode 100644
index 0000000..21c7315
--- /dev/null
+++ b/Lib/site-packages/google/api_core/grpc_helpers.py
@@ -0,0 +1,600 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for :mod:`grpc`."""
+from typing import Generic, Iterator, Optional, TypeVar
+
+import collections
+import functools
+import warnings
+
+import grpc
+
+from google.api_core import exceptions
+import google.auth
+import google.auth.credentials
+import google.auth.transport.grpc
+import google.auth.transport.requests
+import google.protobuf
+
+PROTOBUF_VERSION = google.protobuf.__version__
+
+# The grpcio-gcp package only has support for protobuf < 4
+if PROTOBUF_VERSION[0:2] == "3.": # pragma: NO COVER
+ try:
+ import grpc_gcp
+
+ warnings.warn(
+ """Support for grpcio-gcp is deprecated. This feature will be
+ removed from `google-api-core` after January 1, 2024. If you need to
+ continue to use this feature, please pin to a specific version of
+ `google-api-core`.""",
+ DeprecationWarning,
+ )
+ HAS_GRPC_GCP = True
+ except ImportError:
+ HAS_GRPC_GCP = False
+else:
+ HAS_GRPC_GCP = False
+
+
+# The list of gRPC Callable interfaces that return iterators.
+_STREAM_WRAP_CLASSES = (grpc.UnaryStreamMultiCallable, grpc.StreamStreamMultiCallable)
+
+# denotes the proto response type for grpc calls
+P = TypeVar("P")
+
+
+def _patch_callable_name(callable_):
+ """Fix-up gRPC callable attributes.
+
+    gRPC callables lack the ``__name__`` attribute, which causes
+ :func:`functools.wraps` to error. This adds the attribute if needed.
+ """
+ if not hasattr(callable_, "__name__"):
+ callable_.__name__ = callable_.__class__.__name__
+
+
+def _wrap_unary_errors(callable_):
+ """Map errors for Unary-Unary and Stream-Unary gRPC callables."""
+ _patch_callable_name(callable_)
+
+ @functools.wraps(callable_)
+ def error_remapped_callable(*args, **kwargs):
+ try:
+ return callable_(*args, **kwargs)
+ except grpc.RpcError as exc:
+ raise exceptions.from_grpc_error(exc) from exc
+
+ return error_remapped_callable
+
+
+class _StreamingResponseIterator(Generic[P], grpc.Call):
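+    """Wraps a streaming gRPC call, remapping errors raised during iteration.
+
+    Prefetching the first result forces the wrapped stream to surface any
+    RPC error immediately, so that a surrounding retry can trigger before
+    the iterator is handed to the caller.
+    """
+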
+ def __init__(self, wrapped, prefetch_first_result=True):
+ self._wrapped = wrapped
+
+ # This iterator is used in a retry context, and returned outside after init.
+ # gRPC will not throw an exception until the stream is consumed, so we need
+ # to retrieve the first result, in order to fail, in order to trigger a retry.
+ try:
+ if prefetch_first_result:
+ self._stored_first_result = next(self._wrapped)
+ except TypeError:
+ # It is possible the wrapped method isn't an iterable (a grpc.Call
+ # for instance). If this happens don't store the first result.
+ pass
+ except StopIteration:
+            # Ignore StopIteration at this time; it should be handled outside of retry.
+ pass
+
+ def __iter__(self) -> Iterator[P]:
+ """This iterator is also an iterable that returns itself."""
+ return self
+
+ def __next__(self) -> P:
+ """Get the next response from the stream.
+
+ Returns:
+ protobuf.Message: A single response from the stream.
+ """
+ try:
+ if hasattr(self, "_stored_first_result"):
+ result = self._stored_first_result
+ del self._stored_first_result
+ return result
+ return next(self._wrapped)
+ except grpc.RpcError as exc:
+ # If the stream has already returned data, we cannot recover here.
+ raise exceptions.from_grpc_error(exc) from exc
+
+ # grpc.Call & grpc.RpcContext interface
+
+ def add_callback(self, callback):
+ return self._wrapped.add_callback(callback)
+
+ def cancel(self):
+ return self._wrapped.cancel()
+
+ def code(self):
+ return self._wrapped.code()
+
+ def details(self):
+ return self._wrapped.details()
+
+ def initial_metadata(self):
+ return self._wrapped.initial_metadata()
+
+ def is_active(self):
+ return self._wrapped.is_active()
+
+ def time_remaining(self):
+ return self._wrapped.time_remaining()
+
+ def trailing_metadata(self):
+ return self._wrapped.trailing_metadata()
+
+
+# public type alias denoting the return type of streaming gapic calls
+GrpcStream = _StreamingResponseIterator[P]
+
+
+def _wrap_stream_errors(callable_):
+ """Wrap errors for Unary-Stream and Stream-Stream gRPC callables.
+
+ The callables that return iterators require a bit more logic to re-map
+ errors when iterating. This wraps both the initial invocation and the
+ iterator of the return value to re-map errors.
+ """
+ _patch_callable_name(callable_)
+
+ @functools.wraps(callable_)
+ def error_remapped_callable(*args, **kwargs):
+ try:
+ result = callable_(*args, **kwargs)
+ # Auto-fetching the first result causes PubSub client's streaming pull
+            # to hang when re-opening the stream, thus we need to examine the hacky
+ # hidden flag to see if pre-fetching is disabled.
+ # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257
+ prefetch_first = getattr(callable_, "_prefetch_first_result_", True)
+ return _StreamingResponseIterator(
+ result, prefetch_first_result=prefetch_first
+ )
+ except grpc.RpcError as exc:
+ raise exceptions.from_grpc_error(exc) from exc
+
+ return error_remapped_callable
+
+
+def wrap_errors(callable_):
+ """Wrap a gRPC callable and map :class:`grpc.RpcErrors` to friendly error
+ classes.
+
+ Errors raised by the gRPC callable are mapped to the appropriate
+ :class:`google.api_core.exceptions.GoogleAPICallError` subclasses.
+ The original `grpc.RpcError` (which is usually also a `grpc.Call`) is
+ available from the ``response`` property on the mapped exception. This
+ is useful for extracting metadata from the original error.
+
+ Args:
+ callable_ (Callable): A gRPC callable.
+
+ Returns:
+ Callable: The wrapped gRPC callable.
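+
+    Example (a sketch; ``stub.GetTopic`` is a hypothetical gRPC callable)::
+
+        wrapped = wrap_errors(stub.GetTopic)
+        try:
+            response = wrapped(request)
+        except exceptions.NotFound as exc:
+            original_rpc_error = exc.response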
+ """
+ if isinstance(callable_, _STREAM_WRAP_CLASSES):
+ return _wrap_stream_errors(callable_)
+ else:
+ return _wrap_unary_errors(callable_)
+
+
+def _create_composite_credentials(
+ credentials=None,
+ credentials_file=None,
+ default_scopes=None,
+ scopes=None,
+ ssl_credentials=None,
+ quota_project_id=None,
+ default_host=None,
+):
+ """Create the composite credentials for secure channels.
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials. If
+ not specified, then this function will attempt to ascertain the
+ credentials from the environment using :func:`google.auth.default`.
+ credentials_file (str): A file with credentials that can be loaded with
+ :func:`google.auth.load_credentials_from_file`. This argument is
+ mutually exclusive with credentials.
+        default_scopes (Sequence[str]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+        scopes (Sequence[str]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
+ credentials. This can be used to specify different certificates.
+ quota_project_id (str): An optional project to use for billing and quota.
+ default_host (str): The default endpoint. e.g., "pubsub.googleapis.com".
+
+ Returns:
+ grpc.ChannelCredentials: The composed channel credentials object.
+
+ Raises:
+ google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
+ """
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials' and 'credentials_file' are mutually exclusive."
+ )
+
+ if credentials_file:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, scopes=scopes, default_scopes=default_scopes
+ )
+ elif credentials:
+ credentials = google.auth.credentials.with_scopes_if_required(
+ credentials, scopes=scopes, default_scopes=default_scopes
+ )
+ else:
+ credentials, _ = google.auth.default(
+ scopes=scopes, default_scopes=default_scopes
+ )
+
+ if quota_project_id and isinstance(
+ credentials, google.auth.credentials.CredentialsWithQuotaProject
+ ):
+ credentials = credentials.with_quota_project(quota_project_id)
+
+ request = google.auth.transport.requests.Request()
+
+ # Create the metadata plugin for inserting the authorization header.
+ metadata_plugin = google.auth.transport.grpc.AuthMetadataPlugin(
+ credentials,
+ request,
+ default_host=default_host,
+ )
+
+ # Create a set of grpc.CallCredentials using the metadata plugin.
+ google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin)
+
+ # if `ssl_credentials` is set, use `grpc.composite_channel_credentials` instead of
+ # `grpc.compute_engine_channel_credentials` as the former supports passing
+ # `ssl_credentials` via `channel_credentials` which is needed for mTLS.
+ if ssl_credentials:
+ # Combine the ssl credentials and the authorization credentials.
+ # See https://grpc.github.io/grpc/python/grpc.html#grpc.composite_channel_credentials
+ return grpc.composite_channel_credentials(
+ ssl_credentials, google_auth_credentials
+ )
+ else:
+ # Use grpc.compute_engine_channel_credentials in order to support Direct Path.
+ # See https://grpc.github.io/grpc/python/grpc.html#grpc.compute_engine_channel_credentials
+ # TODO(https://github.com/googleapis/python-api-core/issues/598):
+ # Although `grpc.compute_engine_channel_credentials` returns channel credentials
+ # outside of a Google Compute Engine environment (GCE), we should determine if
+ # there is a way to reliably detect a GCE environment so that
+ # `grpc.compute_engine_channel_credentials` is not called outside of GCE.
+ return grpc.compute_engine_channel_credentials(google_auth_credentials)
+
+
+def create_channel(
+ target,
+ credentials=None,
+ scopes=None,
+ ssl_credentials=None,
+ credentials_file=None,
+ quota_project_id=None,
+ default_scopes=None,
+ default_host=None,
+ compression=None,
+ attempt_direct_path: Optional[bool] = False,
+ **kwargs,
+):
+ """Create a secure channel with credentials.
+
+ Args:
+ target (str): The target service address in the format 'hostname:port'.
+ credentials (google.auth.credentials.Credentials): The credentials. If
+ not specified, then this function will attempt to ascertain the
+ credentials from the environment using :func:`google.auth.default`.
+        scopes (Sequence[str]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
+ credentials. This can be used to specify different certificates.
+ credentials_file (str): A file with credentials that can be loaded with
+ :func:`google.auth.load_credentials_from_file`. This argument is
+ mutually exclusive with credentials.
+ quota_project_id (str): An optional project to use for billing and quota.
+ default_scopes (Sequence[str]): Default scopes passed by a Google client
+ library. Use 'scopes' for user-defined scopes.
+ default_host (str): The default endpoint. e.g., "pubsub.googleapis.com".
+ compression (grpc.Compression): An optional value indicating the
+ compression method to be used over the lifetime of the channel.
+ attempt_direct_path (Optional[bool]): If set, Direct Path will be attempted
+ when the request is made. Direct Path is only available within a Google
+ Compute Engine (GCE) environment and provides a proxyless connection
+ which increases the available throughput, reduces latency, and increases
+ reliability. Note:
+
+ - This argument should only be set in a GCE environment and for Services
+ that are known to support Direct Path.
+ - If this argument is set outside of GCE, then this request will fail
+ unless the back-end service happens to have configured fall-back to DNS.
+ - If the request causes a `ServiceUnavailable` response, it is recommended
+ that the client repeat the request with `attempt_direct_path` set to
+ `False` as the Service may not support Direct Path.
+ - Using `ssl_credentials` with `attempt_direct_path` set to `True` will
+ result in `ValueError` as this combination is not yet supported.
+
+ kwargs: Additional key-word args passed to
+ :func:`grpc_gcp.secure_channel` or :func:`grpc.secure_channel`.
+ Note: `grpc_gcp` is only supported in environments with protobuf < 4.0.0.
+
+ Returns:
+ grpc.Channel: The created channel.
+
+ Raises:
+ google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
+ ValueError: If `ssl_credentials` is set and `attempt_direct_path` is set to `True`.
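+
+    Example (a sketch; the endpoint and scope are illustrative)::
+
+        channel = create_channel(
+            "pubsub.googleapis.com:443",
+            scopes=["https://www.googleapis.com/auth/pubsub"],
+        )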
+ """
+
+ # If `ssl_credentials` is set and `attempt_direct_path` is set to `True`,
+ # raise ValueError as this is not yet supported.
+ # See https://github.com/googleapis/python-api-core/issues/590
+ if ssl_credentials and attempt_direct_path:
+ raise ValueError("Using ssl_credentials with Direct Path is not supported")
+
+ composite_credentials = _create_composite_credentials(
+ credentials=credentials,
+ credentials_file=credentials_file,
+ default_scopes=default_scopes,
+ scopes=scopes,
+ ssl_credentials=ssl_credentials,
+ quota_project_id=quota_project_id,
+ default_host=default_host,
+ )
+
+ # Note that grpcio-gcp is deprecated
+ if HAS_GRPC_GCP: # pragma: NO COVER
+ if compression is not None and compression != grpc.Compression.NoCompression:
+ warnings.warn(
+ "The `compression` argument is ignored for grpc_gcp.secure_channel creation.",
+ DeprecationWarning,
+ )
+ if attempt_direct_path:
+ warnings.warn(
+ """The `attempt_direct_path` argument is ignored for grpc_gcp.secure_channel creation.""",
+ DeprecationWarning,
+ )
+ return grpc_gcp.secure_channel(target, composite_credentials, **kwargs)
+
+ if attempt_direct_path:
+ target = _modify_target_for_direct_path(target)
+
+ return grpc.secure_channel(
+ target, composite_credentials, compression=compression, **kwargs
+ )
+
+
+def _modify_target_for_direct_path(target: str) -> str:
+ """
+ Given a target, return a modified version which is compatible with Direct Path.
+
+ Args:
+ target (str): The target service address in the format 'hostname[:port]' or
+ 'dns://hostname[:port]'.
+
+ Returns:
+ target (str): The target service address which is converted into a format compatible with Direct Path.
+            If the target contains `dns:///` or does not contain `:///`, the target will be converted into
+            a format compatible with Direct Path; otherwise the original target will be returned, as it
+            may already denote Direct Path.
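+
+    Example::
+
+        _modify_target_for_direct_path("dns:///pubsub.googleapis.com:443")
+        # -> 'google-c2p:///pubsub.googleapis.com'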
+ """
+
+    # A DNS prefix may be included with the target to indicate that the endpoint lives on the Internet,
+ # outside of Google Cloud Platform.
+ dns_prefix = "dns:///"
+ # Remove "dns:///" if `attempt_direct_path` is set to True as
+ # the Direct Path prefix `google-c2p:///` will be used instead.
+ target = target.replace(dns_prefix, "")
+
+ direct_path_separator = ":///"
+ if direct_path_separator not in target:
+ target_without_port = target.split(":")[0]
+ # Modify the target to use Direct Path by adding the `google-c2p:///` prefix
+ target = f"google-c2p{direct_path_separator}{target_without_port}"
+ return target
+
+
+_MethodCall = collections.namedtuple(
+ "_MethodCall", ("request", "timeout", "metadata", "credentials", "compression")
+)
+
+_ChannelRequest = collections.namedtuple("_ChannelRequest", ("method", "request"))
+
+
+class _CallableStub(object):
+ """Stub for the grpc.*MultiCallable interfaces."""
+
+ def __init__(self, method, channel):
+ self._method = method
+ self._channel = channel
+ self.response = None
+ """Union[protobuf.Message, Callable[protobuf.Message], exception]:
+ The response to give when invoking this callable. If this is a
+ callable, it will be invoked with the request protobuf. If it's an
+ exception, the exception will be raised when this is invoked.
+ """
+ self.responses = None
+ """Iterator[
+ Union[protobuf.Message, Callable[protobuf.Message], exception]]:
+ An iterator of responses. If specified, self.response will be populated
+ on each invocation by calling ``next(self.responses)``."""
+ self.requests = []
+ """List[protobuf.Message]: All requests sent to this callable."""
+ self.calls = []
+ """List[Tuple]: All invocations of this callable. Each tuple is the
+ request, timeout, metadata, compression, and credentials."""
+
+ def __call__(
+ self, request, timeout=None, metadata=None, credentials=None, compression=None
+ ):
+ self._channel.requests.append(_ChannelRequest(self._method, request))
+ self.calls.append(
+ _MethodCall(request, timeout, metadata, credentials, compression)
+ )
+ self.requests.append(request)
+
+ response = self.response
+ if self.responses is not None:
+ if response is None:
+ response = next(self.responses)
+ else:
+ raise ValueError(
+ "{method}.response and {method}.responses are mutually "
+ "exclusive.".format(method=self._method)
+ )
+
+ if callable(response):
+ return response(request)
+
+ if isinstance(response, Exception):
+ raise response
+
+ if response is not None:
+ return response
+
+ raise ValueError('Method stub for "{}" has no response.'.format(self._method))
+
+
+def _simplify_method_name(method):
+ """Simplifies a gRPC method name.
+
+ When gRPC invokes the channel to create a callable, it gives a full
+ method name like "/google.pubsub.v1.Publisher/CreateTopic". This
+ returns just the name of the method, in this case "CreateTopic".
+
+ Args:
+ method (str): The name of the method.
+
+ Returns:
+ str: The simplified name of the method.
+ """
+ return method.rsplit("/", 1).pop()
+
+
+class ChannelStub(grpc.Channel):
+ """A testing stub for the grpc.Channel interface.
+
+ This can be used to test any client that eventually uses a gRPC channel
+ to communicate. By passing in a channel stub, you can configure which
+ responses are returned and track which requests are made.
+
+ For example:
+
+ .. code-block:: python
+
+ channel_stub = grpc_helpers.ChannelStub()
+ client = FooClient(channel=channel_stub)
+
+ channel_stub.GetFoo.response = foo_pb2.Foo(name='bar')
+
+ foo = client.get_foo(labels=['baz'])
+
+ assert foo.name == 'bar'
+        assert channel_stub.GetFoo.requests[0].labels == ['baz']
+
+ Each method on the stub can be accessed and configured on the channel.
+ Here's some examples of various configurations:
+
+ .. code-block:: python
+
+ # Return a basic response:
+
+ channel_stub.GetFoo.response = foo_pb2.Foo(name='bar')
+ assert client.get_foo().name == 'bar'
+
+ # Raise an exception:
+ channel_stub.GetFoo.response = NotFound('...')
+
+ with pytest.raises(NotFound):
+ client.get_foo()
+
+ # Use a sequence of responses:
+ channel_stub.GetFoo.responses = iter([
+ foo_pb2.Foo(name='bar'),
+ foo_pb2.Foo(name='baz'),
+ ])
+
+ assert client.get_foo().name == 'bar'
+ assert client.get_foo().name == 'baz'
+
+ # Use a callable
+
+ def on_get_foo(request):
+ return foo_pb2.Foo(name='bar' + request.id)
+
+ channel_stub.GetFoo.response = on_get_foo
+
+ assert client.get_foo(id='123').name == 'bar123'
+ """
+
+ def __init__(self, responses=[]):
+ self.requests = []
+ """Sequence[Tuple[str, protobuf.Message]]: A list of all requests made
+ on this channel in order. The tuple is of method name, request
+ message."""
+ self._method_stubs = {}
+
+ def _stub_for_method(self, method):
+ method = _simplify_method_name(method)
+ self._method_stubs[method] = _CallableStub(method, self)
+ return self._method_stubs[method]
+
+ def __getattr__(self, key):
+ try:
+ return self._method_stubs[key]
+ except KeyError:
+ raise AttributeError
+
+ def unary_unary(self, method, request_serializer=None, response_deserializer=None):
+ """grpc.Channel.unary_unary implementation."""
+ return self._stub_for_method(method)
+
+ def unary_stream(self, method, request_serializer=None, response_deserializer=None):
+ """grpc.Channel.unary_stream implementation."""
+ return self._stub_for_method(method)
+
+ def stream_unary(self, method, request_serializer=None, response_deserializer=None):
+ """grpc.Channel.stream_unary implementation."""
+ return self._stub_for_method(method)
+
+ def stream_stream(
+ self, method, request_serializer=None, response_deserializer=None
+ ):
+ """grpc.Channel.stream_stream implementation."""
+ return self._stub_for_method(method)
+
+ def subscribe(self, callback, try_to_connect=False):
+ """grpc.Channel.subscribe implementation."""
+ pass
+
+ def unsubscribe(self, callback):
+ """grpc.Channel.unsubscribe implementation."""
+ pass
+
+ def close(self):
+ """grpc.Channel.close implementation."""
+ pass
diff --git a/Lib/site-packages/google/api_core/grpc_helpers_async.py b/Lib/site-packages/google/api_core/grpc_helpers_async.py
new file mode 100644
index 0000000..9423d2b
--- /dev/null
+++ b/Lib/site-packages/google/api_core/grpc_helpers_async.py
@@ -0,0 +1,336 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AsyncIO helpers for :mod:`grpc` supporting 3.7+.
+
+Please see the more detailed docstrings in grpc_helpers.py for how to use the
+following functions. This module implements the same surface with AsyncIO
+semantics.
+"""
+
+import asyncio
+import functools
+
+from typing import AsyncGenerator, Generic, Iterator, Optional, TypeVar
+
+import grpc
+from grpc import aio
+
+from google.api_core import exceptions, grpc_helpers
+
+# denotes the proto response type for grpc calls
+P = TypeVar("P")
+
+# NOTE(lidiz) Alternatively, we can hack "__getattribute__" to perform
+# automatic patching for us. But that means the overhead of creating an
+# extra Python function spreads to every single send and receive.
+
+
+class _WrappedCall(aio.Call):
+ def __init__(self):
+ self._call = None
+
+ def with_call(self, call):
+ """Supplies the call object separately to keep __init__ clean."""
+ self._call = call
+ return self
+
+ async def initial_metadata(self):
+ return await self._call.initial_metadata()
+
+ async def trailing_metadata(self):
+ return await self._call.trailing_metadata()
+
+ async def code(self):
+ return await self._call.code()
+
+ async def details(self):
+ return await self._call.details()
+
+ def cancelled(self):
+ return self._call.cancelled()
+
+ def done(self):
+ return self._call.done()
+
+ def time_remaining(self):
+ return self._call.time_remaining()
+
+ def cancel(self):
+ return self._call.cancel()
+
+ def add_done_callback(self, callback):
+ self._call.add_done_callback(callback)
+
+ async def wait_for_connection(self):
+ try:
+ await self._call.wait_for_connection()
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+
+class _WrappedUnaryResponseMixin(Generic[P], _WrappedCall):
+ def __await__(self) -> Iterator[P]:
+ try:
+ response = yield from self._call.__await__()
+ return response
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+
+class _WrappedStreamResponseMixin(Generic[P], _WrappedCall):
+ def __init__(self):
+ self._wrapped_async_generator = None
+
+ async def read(self) -> P:
+ try:
+ return await self._call.read()
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+ async def _wrapped_aiter(self) -> AsyncGenerator[P, None]:
+ try:
+ # NOTE(lidiz) coverage doesn't understand the exception raised from
+ # __anext__ method. It is covered by test case:
+ # test_wrap_stream_errors_aiter_non_rpc_error
+ async for response in self._call: # pragma: no branch
+ yield response
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+ def __aiter__(self) -> AsyncGenerator[P, None]:
+ if not self._wrapped_async_generator:
+ self._wrapped_async_generator = self._wrapped_aiter()
+ return self._wrapped_async_generator
+
+
+class _WrappedStreamRequestMixin(_WrappedCall):
+ async def write(self, request):
+ try:
+ await self._call.write(request)
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+ async def done_writing(self):
+ try:
+ await self._call.done_writing()
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+
+# NOTE(lidiz) Implementing each individual class separately, so we don't
+# expose any API that should not be seen. E.g., __aiter__ in unary-unary
+# RPC, or __await__ in stream-stream RPC.
+class _WrappedUnaryUnaryCall(_WrappedUnaryResponseMixin[P], aio.UnaryUnaryCall):
+ """Wrapped UnaryUnaryCall to map exceptions."""
+
+
+class _WrappedUnaryStreamCall(_WrappedStreamResponseMixin[P], aio.UnaryStreamCall):
+ """Wrapped UnaryStreamCall to map exceptions."""
+
+
+class _WrappedStreamUnaryCall(
+ _WrappedUnaryResponseMixin[P], _WrappedStreamRequestMixin, aio.StreamUnaryCall
+):
+ """Wrapped StreamUnaryCall to map exceptions."""
+
+
+class _WrappedStreamStreamCall(
+ _WrappedStreamRequestMixin, _WrappedStreamResponseMixin[P], aio.StreamStreamCall
+):
+ """Wrapped StreamStreamCall to map exceptions."""
+
+
+# public type alias denoting the return type of async streaming gapic calls
+GrpcAsyncStream = _WrappedStreamResponseMixin[P]
+# public type alias denoting the return type of unary gapic calls
+AwaitableGrpcCall = _WrappedUnaryResponseMixin[P]
+
+
+def _wrap_unary_errors(callable_):
+ """Map errors for Unary-Unary async callables."""
+ grpc_helpers._patch_callable_name(callable_)
+
+ @functools.wraps(callable_)
+ def error_remapped_callable(*args, **kwargs):
+ call = callable_(*args, **kwargs)
+ return _WrappedUnaryUnaryCall().with_call(call)
+
+ return error_remapped_callable
+
+
+def _wrap_stream_errors(callable_):
+ """Map errors for streaming RPC async callables."""
+ grpc_helpers._patch_callable_name(callable_)
+
+ @functools.wraps(callable_)
+ async def error_remapped_callable(*args, **kwargs):
+ call = callable_(*args, **kwargs)
+
+ if isinstance(call, aio.UnaryStreamCall):
+ call = _WrappedUnaryStreamCall().with_call(call)
+ elif isinstance(call, aio.StreamUnaryCall):
+ call = _WrappedStreamUnaryCall().with_call(call)
+ elif isinstance(call, aio.StreamStreamCall):
+ call = _WrappedStreamStreamCall().with_call(call)
+ else:
+ raise TypeError("Unexpected type of call %s" % type(call))
+
+ await call.wait_for_connection()
+ return call
+
+ return error_remapped_callable
+
+
+def wrap_errors(callable_):
+ """Wrap a gRPC async callable and map :class:`grpc.RpcErrors` to
+ friendly error classes.
+
+ Errors raised by the gRPC callable are mapped to the appropriate
+ :class:`google.api_core.exceptions.GoogleAPICallError` subclasses. The
+ original `grpc.RpcError` (which is usually also a `grpc.Call`) is
+ available from the ``response`` property on the mapped exception. This
+ is useful for extracting metadata from the original error.
+
+ Args:
+ callable_ (Callable): A gRPC callable.
+
+    Returns:
+        Callable: The wrapped gRPC callable.
+ """
+ if isinstance(callable_, aio.UnaryUnaryMultiCallable):
+ return _wrap_unary_errors(callable_)
+ else:
+ return _wrap_stream_errors(callable_)
+
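+# Illustrative usage sketch for ``wrap_errors`` (the channel target and method
+# path are hypothetical):
+#
+#   channel = aio.insecure_channel("localhost:50051")
+#   callable_ = channel.unary_unary("/example.v1.Foo/GetFoo")
+#   wrapped = wrap_errors(callable_)
+#   # Awaiting ``wrapped(request)`` raises, e.g.,
+#   # google.api_core.exceptions.NotFound instead of a bare grpc.RpcError
+#   # with StatusCode.NOT_FOUND.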
+
+def create_channel(
+ target,
+ credentials=None,
+ scopes=None,
+ ssl_credentials=None,
+ credentials_file=None,
+ quota_project_id=None,
+ default_scopes=None,
+ default_host=None,
+ compression=None,
+ attempt_direct_path: Optional[bool] = False,
+ **kwargs
+):
+ """Create an AsyncIO secure channel with credentials.
+
+ Args:
+ target (str): The target service address in the format 'hostname:port'.
+ credentials (google.auth.credentials.Credentials): The credentials. If
+ not specified, then this function will attempt to ascertain the
+ credentials from the environment using :func:`google.auth.default`.
+        scopes (Sequence[str]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
+ credentials. This can be used to specify different certificates.
+ credentials_file (str): A file with credentials that can be loaded with
+ :func:`google.auth.load_credentials_from_file`. This argument is
+ mutually exclusive with credentials.
+ quota_project_id (str): An optional project to use for billing and quota.
+ default_scopes (Sequence[str]): Default scopes passed by a Google client
+ library. Use 'scopes' for user-defined scopes.
+ default_host (str): The default endpoint. e.g., "pubsub.googleapis.com".
+ compression (grpc.Compression): An optional value indicating the
+ compression method to be used over the lifetime of the channel.
+ attempt_direct_path (Optional[bool]): If set, Direct Path will be attempted
+ when the request is made. Direct Path is only available within a Google
+ Compute Engine (GCE) environment and provides a proxyless connection
+ which increases the available throughput, reduces latency, and increases
+ reliability. Note:
+
+ - This argument should only be set in a GCE environment and for Services
+ that are known to support Direct Path.
+ - If this argument is set outside of GCE, then this request will fail
+ unless the back-end service happens to have configured fall-back to DNS.
+ - If the request causes a `ServiceUnavailable` response, it is recommended
+ that the client repeat the request with `attempt_direct_path` set to
+ `False` as the Service may not support Direct Path.
+ - Using `ssl_credentials` with `attempt_direct_path` set to `True` will
+ result in `ValueError` as this combination is not yet supported.
+
+ kwargs: Additional key-word args passed to :func:`aio.secure_channel`.
+
+ Returns:
+ aio.Channel: The created channel.
+
+ Raises:
+ google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
+ ValueError: If `ssl_credentials` is set and `attempt_direct_path` is set to `True`.
+ """
+
+ # If `ssl_credentials` is set and `attempt_direct_path` is set to `True`,
+ # raise ValueError as this is not yet supported.
+ # See https://github.com/googleapis/python-api-core/issues/590
+ if ssl_credentials and attempt_direct_path:
+ raise ValueError("Using ssl_credentials with Direct Path is not supported")
+
+ composite_credentials = grpc_helpers._create_composite_credentials(
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ default_scopes=default_scopes,
+ ssl_credentials=ssl_credentials,
+ quota_project_id=quota_project_id,
+ default_host=default_host,
+ )
+
+ if attempt_direct_path:
+ target = grpc_helpers._modify_target_for_direct_path(target)
+
+ return aio.secure_channel(
+ target, composite_credentials, compression=compression, **kwargs
+ )
+
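+# Illustrative usage sketch for ``create_channel`` (the target below is
+# hypothetical; with no explicit credentials it falls back to
+# :func:`google.auth.default`):
+#
+#   channel = create_channel("pubsub.googleapis.com:443")
+#   # ``channel`` is an ``aio.Channel`` ready to back a generated async client.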
+
+class FakeUnaryUnaryCall(_WrappedUnaryUnaryCall):
+ """Fake implementation for unary-unary RPCs.
+
+    It is a dummy response object. Supply the intended response at
+    initialization, and awaiting the call will return exactly that response
+    message.
+ """
+
+ def __init__(self, response=object()):
+ self.response = response
+ self._future = asyncio.get_event_loop().create_future()
+ self._future.set_result(self.response)
+
+ def __await__(self):
+ response = yield from self._future.__await__()
+ return response
+
+
+class FakeStreamUnaryCall(_WrappedStreamUnaryCall):
+ """Fake implementation for stream-unary RPCs.
+
+    It is a dummy response object. Supply the intended response at
+    initialization, and awaiting the call will return exactly that response
+    message.
+ """
+
+ def __init__(self, response=object()):
+ self.response = response
+ self._future = asyncio.get_event_loop().create_future()
+ self._future.set_result(self.response)
+
+ def __await__(self):
+ response = yield from self._future.__await__()
+ return response
+
+ async def wait_for_connection(self):
+ pass
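+
+
+# Illustrative test sketch using the fakes above (assumes a running event
+# loop, e.g. inside an ``async def`` test body):
+#
+#   async def test_fake_unary_unary():
+#       call = FakeUnaryUnaryCall(response="hello")
+#       assert await call == "hello"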
diff --git a/Lib/site-packages/google/api_core/iam.py b/Lib/site-packages/google/api_core/iam.py
new file mode 100644
index 0000000..4437c70
--- /dev/null
+++ b/Lib/site-packages/google/api_core/iam.py
@@ -0,0 +1,427 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Non-API-specific IAM policy definitions
+
+For allowed roles / permissions, see:
+https://cloud.google.com/iam/docs/understanding-roles
+
+Example usage:
+
+.. code-block:: python
+
+    # ``get_iam_policy`` returns a :class:`~google.api_core.iam.Policy`.
+ policy = resource.get_iam_policy(requested_policy_version=3)
+
+ phred = "user:phred@example.com"
+ admin_group = "group:admins@groups.example.com"
+ account = "serviceAccount:account-1234@accounts.example.com"
+
+ policy.version = 3
+ policy.bindings = [
+ {
+ "role": "roles/owner",
+ "members": {phred, admin_group, account}
+ },
+ {
+ "role": "roles/editor",
+ "members": {"allAuthenticatedUsers"}
+ },
+ {
+ "role": "roles/viewer",
+ "members": {"allUsers"}
+ "condition": {
+ "title": "request_time",
+ "description": "Requests made before 2021-01-01T00:00:00Z",
+ "expression": "request.time < timestamp(\"2021-01-01T00:00:00Z\")"
+ }
+ }
+ ]
+
+ resource.set_iam_policy(policy)
+"""
+
+import collections
+import collections.abc
+import operator
+import warnings
+
+# Generic IAM roles
+
+OWNER_ROLE = "roles/owner"
+"""Generic role implying all rights to an object."""
+
+EDITOR_ROLE = "roles/editor"
+"""Generic role implying rights to modify an object."""
+
+VIEWER_ROLE = "roles/viewer"
+"""Generic role implying rights to access an object."""
+
+_ASSIGNMENT_DEPRECATED_MSG = """\
+Assigning to '{}' is deprecated. Use the `policy.bindings` property to modify bindings instead."""
+
+_DICT_ACCESS_MSG = """\
+Dict access is not supported on policies with version > 1 or with conditional bindings."""
+
+
+class InvalidOperationException(Exception):
+ """Raised when trying to use Policy class as a dict."""
+
+ pass
+
+
+class Policy(collections.abc.MutableMapping):
+ """IAM Policy
+
+ Args:
+        etag (Optional[str]): ETag used to identify a unique version of the policy.
+ version (Optional[int]): The syntax schema version of the policy.
+
+ Note:
+ Using conditions in bindings requires the policy's version to be set
+ to `3` or greater, depending on the versions that are currently supported.
+
+        Accessing the policy using dict operations will raise InvalidOperationException
+        when the policy's version is greater than 1 or the policy contains
+        conditional bindings.
+
+ Use the policy.bindings getter/setter to retrieve and modify the policy's bindings.
+
+ See:
+ IAM Policy https://cloud.google.com/iam/reference/rest/v1/Policy
+ Policy versions https://cloud.google.com/iam/docs/policies#versions
+ Conditions overview https://cloud.google.com/iam/docs/conditions-overview.
+ """
+
+ _OWNER_ROLES = (OWNER_ROLE,)
+ """Roles mapped onto our ``owners`` attribute."""
+
+ _EDITOR_ROLES = (EDITOR_ROLE,)
+ """Roles mapped onto our ``editors`` attribute."""
+
+ _VIEWER_ROLES = (VIEWER_ROLE,)
+ """Roles mapped onto our ``viewers`` attribute."""
+
+ def __init__(self, etag=None, version=None):
+ self.etag = etag
+ self.version = version
+ self._bindings = []
+
+ def __iter__(self):
+ self.__check_version__()
+ # Exclude bindings with no members
+ return (binding["role"] for binding in self._bindings if binding["members"])
+
+ def __len__(self):
+ self.__check_version__()
+ # Exclude bindings with no members
+ return len(list(self.__iter__()))
+
+ def __getitem__(self, key):
+ self.__check_version__()
+ for b in self._bindings:
+ if b["role"] == key:
+ return b["members"]
+ # If the binding does not yet exist, create one
+ # NOTE: This will create bindings with no members
+ # which are ignored by __iter__ and __len__
+ new_binding = {"role": key, "members": set()}
+ self._bindings.append(new_binding)
+ return new_binding["members"]
+
+ def __setitem__(self, key, value):
+ self.__check_version__()
+ value = set(value)
+ for binding in self._bindings:
+ if binding["role"] == key:
+ binding["members"] = value
+ return
+ self._bindings.append({"role": key, "members": value})
+
+ def __delitem__(self, key):
+ self.__check_version__()
+ for b in self._bindings:
+ if b["role"] == key:
+ self._bindings.remove(b)
+ return
+ raise KeyError(key)
+
+ def __check_version__(self):
+ """Raise InvalidOperationException if version is greater than 1 or policy contains conditions."""
+ raise_version = self.version is not None and self.version > 1
+
+ if raise_version or self._contains_conditions():
+ raise InvalidOperationException(_DICT_ACCESS_MSG)
+
+ def _contains_conditions(self):
+ for b in self._bindings:
+ if b.get("condition") is not None:
+ return True
+ return False
+
+ @property
+ def bindings(self):
+ """The policy's list of bindings.
+
+ A binding is specified by a dictionary with keys:
+
+ * role (str): Role that is assigned to `members`.
+
+ * members (:obj:`set` of str): Specifies the identities associated to this binding.
+
+ * condition (:obj:`dict` of str:str): Specifies a condition under which this binding will apply.
+
+ * title (str): Title for the condition.
+
+          * description (:obj:`str`, optional): Description of the condition.
+
+ * expression: A CEL expression.
+
+ Type:
+ :obj:`list` of :obj:`dict`
+
+ See:
+ Policy versions https://cloud.google.com/iam/docs/policies#versions
+ Conditions overview https://cloud.google.com/iam/docs/conditions-overview.
+
+ Example:
+
+ .. code-block:: python
+
+ USER = "user:phred@example.com"
+ ADMIN_GROUP = "group:admins@groups.example.com"
+ SERVICE_ACCOUNT = "serviceAccount:account-1234@accounts.example.com"
+ CONDITION = {
+ "title": "request_time",
+ "description": "Requests made before 2021-01-01T00:00:00Z", # Optional
+ "expression": "request.time < timestamp(\"2021-01-01T00:00:00Z\")"
+ }
+
+ # Set policy's version to 3 before setting bindings containing conditions.
+ policy.version = 3
+
+ policy.bindings = [
+ {
+ "role": "roles/viewer",
+ "members": {USER, ADMIN_GROUP, SERVICE_ACCOUNT},
+ "condition": CONDITION
+ },
+ ...
+ ]
+ """
+ return self._bindings
+
+ @bindings.setter
+ def bindings(self, bindings):
+ self._bindings = bindings
+
+ @property
+ def owners(self):
+ """Legacy access to owner role.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to access bindings instead.
+ """
+ result = set()
+ for role in self._OWNER_ROLES:
+ for member in self.get(role, ()):
+ result.add(member)
+ return frozenset(result)
+
+ @owners.setter
+ def owners(self, value):
+ """Update owners.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to access bindings instead.
+ """
+ warnings.warn(
+ _ASSIGNMENT_DEPRECATED_MSG.format("owners", OWNER_ROLE), DeprecationWarning
+ )
+ self[OWNER_ROLE] = value
+
+ @property
+ def editors(self):
+ """Legacy access to editor role.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to access bindings instead.
+ """
+ result = set()
+ for role in self._EDITOR_ROLES:
+ for member in self.get(role, ()):
+ result.add(member)
+ return frozenset(result)
+
+ @editors.setter
+ def editors(self, value):
+ """Update editors.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to modify bindings instead.
+ """
+ warnings.warn(
+ _ASSIGNMENT_DEPRECATED_MSG.format("editors", EDITOR_ROLE),
+ DeprecationWarning,
+ )
+ self[EDITOR_ROLE] = value
+
+ @property
+ def viewers(self):
+ """Legacy access to viewer role.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to modify bindings instead.
+ """
+ result = set()
+ for role in self._VIEWER_ROLES:
+ for member in self.get(role, ()):
+ result.add(member)
+ return frozenset(result)
+
+ @viewers.setter
+ def viewers(self, value):
+ """Update viewers.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to modify bindings instead.
+ """
+ warnings.warn(
+ _ASSIGNMENT_DEPRECATED_MSG.format("viewers", VIEWER_ROLE),
+ DeprecationWarning,
+ )
+ self[VIEWER_ROLE] = value
+
+ @staticmethod
+ def user(email):
+ """Factory method for a user member.
+
+ Args:
+ email (str): E-mail for this particular user.
+
+ Returns:
+ str: A member string corresponding to the given user.
+ """
+ return "user:%s" % (email,)
+
+ @staticmethod
+ def service_account(email):
+ """Factory method for a service account member.
+
+ Args:
+ email (str): E-mail for this particular service account.
+
+ Returns:
+ str: A member string corresponding to the given service account.
+
+ """
+ return "serviceAccount:%s" % (email,)
+
+ @staticmethod
+ def group(email):
+ """Factory method for a group member.
+
+ Args:
+ email (str): An id or e-mail for this particular group.
+
+ Returns:
+ str: A member string corresponding to the given group.
+ """
+ return "group:%s" % (email,)
+
+ @staticmethod
+ def domain(domain):
+ """Factory method for a domain member.
+
+ Args:
+ domain (str): The domain for this member.
+
+ Returns:
+ str: A member string corresponding to the given domain.
+ """
+ return "domain:%s" % (domain,)
+
+ @staticmethod
+ def all_users():
+ """Factory method for a member representing all users.
+
+ Returns:
+ str: A member string representing all users.
+ """
+ return "allUsers"
+
+ @staticmethod
+ def authenticated_users():
+ """Factory method for a member representing all authenticated users.
+
+ Returns:
+ str: A member string representing all authenticated users.
+ """
+ return "allAuthenticatedUsers"
+
+ @classmethod
+ def from_api_repr(cls, resource):
+ """Factory: create a policy from a JSON resource.
+
+ Args:
+ resource (dict): policy resource returned by ``getIamPolicy`` API.
+
+ Returns:
+ :class:`Policy`: the parsed policy
+ """
+ version = resource.get("version")
+ etag = resource.get("etag")
+ policy = cls(etag, version)
+ policy.bindings = resource.get("bindings", [])
+
+ for binding in policy.bindings:
+ binding["members"] = set(binding.get("members", ()))
+
+ return policy
+
+ def to_api_repr(self):
+ """Render a JSON policy resource.
+
+ Returns:
+ dict: a resource to be passed to the ``setIamPolicy`` API.
+ """
+ resource = {}
+
+ if self.etag is not None:
+ resource["etag"] = self.etag
+
+ if self.version is not None:
+ resource["version"] = self.version
+
+        if self._bindings:
+ bindings = []
+ for binding in self._bindings:
+ members = binding.get("members")
+ if members:
+ new_binding = {"role": binding["role"], "members": sorted(members)}
+ condition = binding.get("condition")
+ if condition:
+ new_binding["condition"] = condition
+ bindings.append(new_binding)
+
+ if bindings:
+ # Sort bindings by role
+ key = operator.itemgetter("role")
+ resource["bindings"] = sorted(bindings, key=key)
+
+ return resource
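+
+
+# Illustrative round trip through the API representation (values are
+# hypothetical):
+#
+#   policy = Policy.from_api_repr({
+#       "etag": "BwWWja0YfJA=",
+#       "version": 1,
+#       "bindings": [{"role": "roles/viewer", "members": ["allUsers"]}],
+#   })
+#   assert policy.to_api_repr()["bindings"] == [
+#       {"role": "roles/viewer", "members": ["allUsers"]}
+#   ]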
diff --git a/Lib/site-packages/google/api_core/operation.py b/Lib/site-packages/google/api_core/operation.py
new file mode 100644
index 0000000..4b9c9a5
--- /dev/null
+++ b/Lib/site-packages/google/api_core/operation.py
@@ -0,0 +1,365 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Futures for long-running operations returned from Google Cloud APIs.
+
+These futures can be used to synchronously wait for the result of a
+long-running operation using :meth:`Operation.result`:
+
+
+.. code-block:: python
+
+ operation = my_api_client.long_running_method()
+ result = operation.result()
+
+Or asynchronously using callbacks and :meth:`Operation.add_done_callback`:
+
+.. code-block:: python
+
+ operation = my_api_client.long_running_method()
+
+ def my_callback(future):
+ result = future.result()
+
+ operation.add_done_callback(my_callback)
+
+"""
+
+import functools
+import threading
+
+from google.api_core import exceptions
+from google.api_core import protobuf_helpers
+from google.api_core.future import polling
+from google.longrunning import operations_pb2
+from google.protobuf import json_format
+from google.rpc import code_pb2
+
+
+class Operation(polling.PollingFuture):
+ """A Future for interacting with a Google API Long-Running Operation.
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The
+ initial operation.
+ refresh (Callable[[], ~.api_core.operation.Operation]): A callable that
+ returns the latest state of the operation.
+ cancel (Callable[[], None]): A callable that tries to cancel
+ the operation.
+ result_type (func:`type`): The protobuf type for the operation's
+ result.
+ metadata_type (func:`type`): The protobuf type for the operation's
+ metadata.
+ polling (google.api_core.retry.Retry): The configuration used for polling.
+ This parameter controls how often :meth:`done` is polled. If the
+ ``timeout`` argument is specified in the :meth:`result` method, it will
+ override the ``polling.timeout`` property.
+ retry (google.api_core.retry.Retry): DEPRECATED: use ``polling`` instead.
+ If specified it will override ``polling`` parameter to maintain
+ backward compatibility.
+ """
+
+ def __init__(
+ self,
+ operation,
+ refresh,
+ cancel,
+ result_type,
+ metadata_type=None,
+ polling=polling.DEFAULT_POLLING,
+ **kwargs
+ ):
+ super(Operation, self).__init__(polling=polling, **kwargs)
+ self._operation = operation
+ self._refresh = refresh
+ self._cancel = cancel
+ self._result_type = result_type
+ self._metadata_type = metadata_type
+ self._completion_lock = threading.Lock()
+ # Invoke this in case the operation came back already complete.
+ self._set_result_from_operation()
+
+ @property
+ def operation(self):
+ """google.longrunning.Operation: The current long-running operation."""
+ return self._operation
+
+ @property
+ def metadata(self):
+ """google.protobuf.Message: the current operation metadata."""
+ if not self._operation.HasField("metadata"):
+ return None
+
+ return protobuf_helpers.from_any_pb(
+ self._metadata_type, self._operation.metadata
+ )
+
+ @classmethod
+    def deserialize(cls, payload):
+ """Deserialize a ``google.longrunning.Operation`` protocol buffer.
+
+ Args:
+ payload (bytes): A serialized operation protocol buffer.
+
+ Returns:
+ ~.operations_pb2.Operation: An Operation protobuf object.
+ """
+ return operations_pb2.Operation.FromString(payload)
+
+ def _set_result_from_operation(self):
+ """Set the result or exception from the operation if it is complete."""
+ # This must be done in a lock to prevent the polling thread
+ # and main thread from both executing the completion logic
+ # at the same time.
+ with self._completion_lock:
+ # If the operation isn't complete or if the result has already been
+ # set, do not call set_result/set_exception again.
+ # Note: self._result_set is set to True in set_result and
+ # set_exception, in case those methods are invoked directly.
+ if not self._operation.done or self._result_set:
+ return
+
+ if self._operation.HasField("response"):
+ response = protobuf_helpers.from_any_pb(
+ self._result_type, self._operation.response
+ )
+ self.set_result(response)
+ elif self._operation.HasField("error"):
+ exception = exceptions.from_grpc_status(
+ status_code=self._operation.error.code,
+ message=self._operation.error.message,
+ errors=(self._operation.error,),
+ response=self._operation,
+ )
+ self.set_exception(exception)
+ else:
+ exception = exceptions.GoogleAPICallError(
+ "Unexpected state: Long-running operation had neither "
+ "response nor error set."
+ )
+ self.set_exception(exception)
+
+ def _refresh_and_update(self, retry=None):
+ """Refresh the operation and update the result if needed.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+ """
+ # If the currently cached operation is done, no need to make another
+ # RPC as it will not change once done.
+ if not self._operation.done:
+ self._operation = self._refresh(retry=retry) if retry else self._refresh()
+ self._set_result_from_operation()
+
+ def done(self, retry=None):
+ """Checks to see if the operation is complete.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+
+ Returns:
+ bool: True if the operation is complete, False otherwise.
+ """
+ self._refresh_and_update(retry)
+ return self._operation.done
+
+ def cancel(self):
+ """Attempt to cancel the operation.
+
+ Returns:
+ bool: True if the cancel RPC was made, False if the operation is
+ already complete.
+ """
+ if self.done():
+ return False
+
+ self._cancel()
+ return True
+
+ def cancelled(self):
+ """True if the operation was cancelled."""
+ self._refresh_and_update()
+ return (
+ self._operation.HasField("error")
+ and self._operation.error.code == code_pb2.CANCELLED
+ )
+
+
+def _refresh_http(api_request, operation_name, retry=None):
+ """Refresh an operation using a JSON/HTTP client.
+
+ Args:
+ api_request (Callable): A callable used to make an API request. This
+ should generally be
+ :meth:`google.cloud._http.Connection.api_request`.
+ operation_name (str): The name of the operation.
+ retry (google.api_core.retry.Retry): (Optional) retry policy
+
+ Returns:
+ google.longrunning.operations_pb2.Operation: The operation.
+ """
+ path = "operations/{}".format(operation_name)
+
+ if retry is not None:
+ api_request = retry(api_request)
+
+ api_response = api_request(method="GET", path=path)
+ return json_format.ParseDict(api_response, operations_pb2.Operation())
+
+
+def _cancel_http(api_request, operation_name):
+ """Cancel an operation using a JSON/HTTP client.
+
+ Args:
+ api_request (Callable): A callable used to make an API request. This
+ should generally be
+ :meth:`google.cloud._http.Connection.api_request`.
+ operation_name (str): The name of the operation.
+ """
+ path = "operations/{}:cancel".format(operation_name)
+ api_request(method="POST", path=path)
+
+
+def from_http_json(operation, api_request, result_type, **kwargs):
+ """Create an operation future using a HTTP/JSON client.
+
+ This interacts with the long-running operations `service`_ (specific
+ to a given API) via `HTTP/JSON`_.
+
+ .. _HTTP/JSON: https://cloud.google.com/speech/reference/rest/\
+ v1beta1/operations#Operation
+
+ Args:
+ operation (dict): Operation as a dictionary.
+ api_request (Callable): A callable used to make an API request. This
+ should generally be
+ :meth:`google.cloud._http.Connection.api_request`.
+ result_type (:func:`type`): The protobuf result type.
+ kwargs: Keyword args passed into the :class:`Operation` constructor.
+
+ Returns:
+ ~.api_core.operation.Operation: The operation future to track the given
+ operation.
+ """
+ operation_proto = json_format.ParseDict(operation, operations_pb2.Operation())
+ refresh = functools.partial(_refresh_http, api_request, operation_proto.name)
+ cancel = functools.partial(_cancel_http, api_request, operation_proto.name)
+ return Operation(operation_proto, refresh, cancel, result_type, **kwargs)
+
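+# Illustrative usage sketch for ``from_http_json`` (``connection`` and the
+# result type below are hypothetical):
+#
+#   future = from_http_json(operation_dict, connection.api_request,
+#                           my_service_pb2.MyResult)
+#   result = future.result()  # polls over HTTP/JSON until done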
+
+def _refresh_grpc(operations_stub, operation_name, retry=None, metadata=None):
+ """Refresh an operation using a gRPC client.
+
+ Args:
+ operations_stub (google.longrunning.operations_pb2.OperationsStub):
+ The gRPC operations stub.
+ operation_name (str): The name of the operation.
+        retry (google.api_core.retry.Retry): (Optional) retry policy
+        metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
+            to the rpc.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation: The operation.
+ """
+ request_pb = operations_pb2.GetOperationRequest(name=operation_name)
+
+ rpc = operations_stub.GetOperation
+ if retry is not None:
+ rpc = retry(rpc)
+
+    return rpc(request_pb, metadata=metadata)
+
+
+def _cancel_grpc(operations_stub, operation_name, metadata=None):
+ """Cancel an operation using a gRPC client.
+
+ Args:
+ operations_stub (google.longrunning.operations_pb2.OperationsStub):
+ The gRPC operations stub.
+        operation_name (str): The name of the operation.
+        metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
+            to the rpc.
+ """
+ request_pb = operations_pb2.CancelOperationRequest(name=operation_name)
+    operations_stub.CancelOperation(request_pb, metadata=metadata)
+
+
+def from_grpc(operation, operations_stub, result_type, grpc_metadata=None, **kwargs):
+ """Create an operation future using a gRPC client.
+
+ This interacts with the long-running operations `service`_ (specific
+ to a given API) via gRPC.
+
+ .. _service: https://github.com/googleapis/googleapis/blob/\
+ 050400df0fdb16f63b63e9dee53819044bffc857/\
+ google/longrunning/operations.proto#L38
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The operation.
+ operations_stub (google.longrunning.operations_pb2.OperationsStub):
+ The operations stub.
+ result_type (:func:`type`): The protobuf result type.
+ grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
+ to the rpc.
+ kwargs: Keyword args passed into the :class:`Operation` constructor.
+
+ Returns:
+ ~.api_core.operation.Operation: The operation future to track the given
+ operation.
+ """
+ refresh = functools.partial(
+ _refresh_grpc,
+ operations_stub,
+ operation.name,
+ metadata=grpc_metadata,
+ )
+ cancel = functools.partial(
+ _cancel_grpc,
+ operations_stub,
+ operation.name,
+ metadata=grpc_metadata,
+ )
+ return Operation(operation, refresh, cancel, result_type, **kwargs)
+
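+# Illustrative usage sketch for ``from_grpc`` (the stub and result type are
+# hypothetical):
+#
+#   stub = operations_pb2.OperationsStub(channel)
+#   future = from_grpc(operation_pb, stub, my_service_pb2.MyResult)
+#   result = future.result()  # polls GetOperation until done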
+
+def from_gapic(operation, operations_client, result_type, grpc_metadata=None, **kwargs):
+ """Create an operation future from a gapic client.
+
+ This interacts with the long-running operations `service`_ (specific
+ to a given API) via a gapic client.
+
+ .. _service: https://github.com/googleapis/googleapis/blob/\
+ 050400df0fdb16f63b63e9dee53819044bffc857/\
+ google/longrunning/operations.proto#L38
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The operation.
+ operations_client (google.api_core.operations_v1.OperationsClient):
+ The operations client.
+ result_type (:func:`type`): The protobuf result type.
+ grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
+ to the rpc.
+ kwargs: Keyword args passed into the :class:`Operation` constructor.
+
+ Returns:
+ ~.api_core.operation.Operation: The operation future to track the given
+ operation.
+ """
+ refresh = functools.partial(
+ operations_client.get_operation,
+ operation.name,
+ metadata=grpc_metadata,
+ )
+ cancel = functools.partial(
+ operations_client.cancel_operation,
+ operation.name,
+ metadata=grpc_metadata,
+ )
+ return Operation(operation, refresh, cancel, result_type, **kwargs)
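+
+
+# Illustrative usage sketch for ``from_gapic`` (the client and types are
+# hypothetical):
+#
+#   future = from_gapic(operation_pb, operations_client,
+#                       my_service_pb2.MyResult,
+#                       metadata_type=my_service_pb2.MyMetadata)
+#   result = future.result(timeout=300)  # blocks until done or timeout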
diff --git a/Lib/site-packages/google/api_core/operation_async.py b/Lib/site-packages/google/api_core/operation_async.py
new file mode 100644
index 0000000..2fd341d
--- /dev/null
+++ b/Lib/site-packages/google/api_core/operation_async.py
@@ -0,0 +1,225 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AsyncIO futures for long-running operations returned from Google Cloud APIs.
+
+These futures can be used to await the result of a long-running operation
+using :meth:`AsyncOperation.result`:
+
+
+.. code-block:: python
+
+ operation = my_api_client.long_running_method()
+ result = await operation.result()
+
+Or asynchronously using callbacks and :meth:`AsyncOperation.add_done_callback`:
+
+.. code-block:: python
+
+ operation = my_api_client.long_running_method()
+
+    async def my_callback(future):
+        result = await future.result()
+
+ operation.add_done_callback(my_callback)
+
+"""
+
+import functools
+import threading
+
+from google.api_core import exceptions
+from google.api_core import protobuf_helpers
+from google.api_core.future import async_future
+from google.longrunning import operations_pb2
+from google.rpc import code_pb2
+
+
+class AsyncOperation(async_future.AsyncFuture):
+ """A Future for interacting with a Google API Long-Running Operation.
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The
+ initial operation.
+ refresh (Callable[[], ~.api_core.operation.Operation]): A callable that
+ returns the latest state of the operation.
+ cancel (Callable[[], None]): A callable that tries to cancel
+ the operation.
+ result_type (func:`type`): The protobuf type for the operation's
+ result.
+ metadata_type (func:`type`): The protobuf type for the operation's
+ metadata.
+ retry (google.api_core.retry.Retry): The retry configuration used
+ when polling. This can be used to control how often :meth:`done`
+            is polled. Note that the retry's ``deadline`` is overridden by the
+            ``timeout`` argument to :meth:`result`.
+ """
+
+ def __init__(
+ self,
+ operation,
+ refresh,
+ cancel,
+ result_type,
+ metadata_type=None,
+ retry=async_future.DEFAULT_RETRY,
+ ):
+ super().__init__(retry=retry)
+ self._operation = operation
+ self._refresh = refresh
+ self._cancel = cancel
+ self._result_type = result_type
+ self._metadata_type = metadata_type
+ self._completion_lock = threading.Lock()
+ # Invoke this in case the operation came back already complete.
+ self._set_result_from_operation()
+
+ @property
+ def operation(self):
+ """google.longrunning.Operation: The current long-running operation."""
+ return self._operation
+
+ @property
+ def metadata(self):
+ """google.protobuf.Message: the current operation metadata."""
+ if not self._operation.HasField("metadata"):
+ return None
+
+ return protobuf_helpers.from_any_pb(
+ self._metadata_type, self._operation.metadata
+ )
+
+ @classmethod
+ def deserialize(cls, payload):
+ """Deserialize a ``google.longrunning.Operation`` protocol buffer.
+
+ Args:
+ payload (bytes): A serialized operation protocol buffer.
+
+ Returns:
+ ~.operations_pb2.Operation: An Operation protobuf object.
+ """
+ return operations_pb2.Operation.FromString(payload)
+
+ def _set_result_from_operation(self):
+ """Set the result or exception from the operation if it is complete."""
+ # This must be done in a lock to prevent the async_future thread
+ # and main thread from both executing the completion logic
+ # at the same time.
+ with self._completion_lock:
+ # If the operation isn't complete or if the result has already been
+ # set, do not call set_result/set_exception again.
+ if not self._operation.done or self._future.done():
+ return
+
+ if self._operation.HasField("response"):
+ response = protobuf_helpers.from_any_pb(
+ self._result_type, self._operation.response
+ )
+ self.set_result(response)
+ elif self._operation.HasField("error"):
+ exception = exceptions.GoogleAPICallError(
+ self._operation.error.message,
+ errors=(self._operation.error,),
+ response=self._operation,
+ )
+ self.set_exception(exception)
+ else:
+ exception = exceptions.GoogleAPICallError(
+ "Unexpected state: Long-running operation had neither "
+ "response nor error set."
+ )
+ self.set_exception(exception)
+
+ async def _refresh_and_update(self, retry=async_future.DEFAULT_RETRY):
+ """Refresh the operation and update the result if needed.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+ """
+ # If the currently cached operation is done, no need to make another
+ # RPC as it will not change once done.
+ if not self._operation.done:
+ self._operation = await self._refresh(retry=retry)
+ self._set_result_from_operation()
+
+ async def done(self, retry=async_future.DEFAULT_RETRY):
+ """Checks to see if the operation is complete.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+
+ Returns:
+ bool: True if the operation is complete, False otherwise.
+ """
+ await self._refresh_and_update(retry)
+ return self._operation.done
+
+ async def cancel(self):
+ """Attempt to cancel the operation.
+
+ Returns:
+ bool: True if the cancel RPC was made, False if the operation is
+ already complete.
+ """
+ result = await self.done()
+ if result:
+ return False
+ else:
+ await self._cancel()
+ return True
+
+ async def cancelled(self):
+ """True if the operation was cancelled."""
+ await self._refresh_and_update()
+ return (
+ self._operation.HasField("error")
+ and self._operation.error.code == code_pb2.CANCELLED
+ )
+
+
+def from_gapic(operation, operations_client, result_type, grpc_metadata=None, **kwargs):
+ """Create an operation future from a gapic client.
+
+ This interacts with the long-running operations `service`_ (specific
+ to a given API) via a gapic client.
+
+ .. _service: https://github.com/googleapis/googleapis/blob/\
+ 050400df0fdb16f63b63e9dee53819044bffc857/\
+ google/longrunning/operations.proto#L38
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The operation.
+ operations_client (google.api_core.operations_v1.OperationsClient):
+ The operations client.
+ result_type (:func:`type`): The protobuf result type.
+ grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
+ to the rpc.
+ kwargs: Keyword args passed into the :class:`Operation` constructor.
+
+ Returns:
+ ~.api_core.operation.Operation: The operation future to track the given
+ operation.
+ """
+ refresh = functools.partial(
+ operations_client.get_operation,
+ operation.name,
+ metadata=grpc_metadata,
+ )
+ cancel = functools.partial(
+ operations_client.cancel_operation,
+ operation.name,
+ metadata=grpc_metadata,
+ )
+ return AsyncOperation(operation, refresh, cancel, result_type, **kwargs)
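+
+
+# Illustrative usage sketch (the client and types are hypothetical; unlike the
+# synchronous variant, the result is awaited):
+#
+#   future = from_gapic(operation_pb, async_operations_client,
+#                       my_service_pb2.MyResult)
+#   result = await future.result()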
diff --git a/Lib/site-packages/google/api_core/operations_v1/__init__.py b/Lib/site-packages/google/api_core/operations_v1/__init__.py
new file mode 100644
index 0000000..6118645
--- /dev/null
+++ b/Lib/site-packages/google/api_core/operations_v1/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Package for interacting with the google.longrunning.operations meta-API."""
+
+from google.api_core.operations_v1.abstract_operations_client import AbstractOperationsClient
+from google.api_core.operations_v1.operations_async_client import OperationsAsyncClient
+from google.api_core.operations_v1.operations_client import OperationsClient
+from google.api_core.operations_v1.transports.rest import OperationsRestTransport
+
+__all__ = [
+ "AbstractOperationsClient",
+ "OperationsAsyncClient",
+ "OperationsClient",
+ "OperationsRestTransport"
+]
diff --git a/Lib/site-packages/google/api_core/operations_v1/abstract_operations_client.py b/Lib/site-packages/google/api_core/operations_v1/abstract_operations_client.py
new file mode 100644
index 0000000..38f532a
--- /dev/null
+++ b/Lib/site-packages/google/api_core/operations_v1/abstract_operations_client.py
@@ -0,0 +1,613 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Optional, Sequence, Tuple, Type, Union
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.api_core.operations_v1 import pagers
+from google.api_core.operations_v1.transports.base import (
+ DEFAULT_CLIENT_INFO,
+ OperationsTransport,
+)
+from google.api_core.operations_v1.transports.rest import OperationsRestTransport
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account # type: ignore
+import grpc
+
+OptionalRetry = Union[retries.Retry, object]
+
+
+class AbstractOperationsClientMeta(type):
+ """Metaclass for the Operations client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]]
+ _transport_registry["rest"] = OperationsRestTransport
+
+ def get_transport_class(
+ cls,
+ label: Optional[str] = None,
+ ) -> Type[OperationsTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class AbstractOperationsClient(metaclass=AbstractOperationsClientMeta):
+ """Manages long-running operations with an API service.
+
+    When an API method normally takes a long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
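+    # Illustrative conversions performed by ``_get_default_mtls_endpoint``
+    # (endpoints are hypothetical):
+    #   "pubsub.googleapis.com"         -> "pubsub.mtls.googleapis.com"
+    #   "pubsub.sandbox.googleapis.com" -> "pubsub.mtls.sandbox.googleapis.com"
+    #   "localhost:8080"                -> "localhost:8080"  (no googleapis domain)
+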
+ DEFAULT_ENDPOINT = "longrunning.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AbstractOperationsClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AbstractOperationsClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> OperationsTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ OperationsTransport: The transport used by the client
+ instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def common_billing_account_path(
+ billing_account: str,
+ ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(
+ folder: str,
+ ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(
+ folder=folder,
+ )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(
+ organization: str,
+ ) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(
+ organization=organization,
+ )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(
+ project: str,
+ ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(
+ project=project,
+ )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(
+ project: str,
+ location: str,
+ ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
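+    # Illustrative behavior of the path helpers (values are hypothetical):
+    #   common_location_path("my-proj", "us-east1")
+    #       -> "projects/my-proj/locations/us-east1"
+    #   parse_common_location_path("projects/my-proj/locations/us-east1")
+    #       -> {"project": "my-proj", "location": "us-east1"}
+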
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, OperationsTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the operations client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, OperationsTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ if isinstance(client_options, dict):
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = os.getenv(
+ "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
+ ).lower()
+ if use_client_cert not in ("true", "false"):
+ raise ValueError(
+ "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+ client_cert_source_func = None
+ is_mtls = False
+ if use_client_cert == "true":
+ if client_options.client_cert_source:
+ is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
+ else:
+ is_mtls = mtls.has_default_client_cert_source()
+ if is_mtls:
+ client_cert_source_func = mtls.default_client_cert_source()
+ else:
+ client_cert_source_func = None
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_mtls_env == "never":
+ api_endpoint = self.DEFAULT_ENDPOINT
+ elif use_mtls_env == "always":
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ elif use_mtls_env == "auto":
+ if is_mtls:
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = self.DEFAULT_ENDPOINT
+ else:
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
+ "values: never, auto, always"
+ )
+
+ # Save or instantiate the transport.
+ # Ordinarily, we provide the transport, but allowing a custom transport
+ # instance provides an extensibility point for unusual situations.
+ if isinstance(transport, OperationsTransport):
+            # transport is an OperationsTransport instance.
+ if credentials or client_options.credentials_file:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its credentials directly."
+ )
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, provide its scopes "
+ "directly."
+ )
+ self._transport = transport
+ else:
+ Transport = type(self).get_transport_class(transport)
+ self._transport = Transport(
+ credentials=credentials,
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ client_cert_source_for_mtls=client_cert_source_func,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=True,
+ )
+
+ def list_operations(
+ self,
+ name: str,
+ filter_: Optional[str] = None,
+ *,
+ page_size: Optional[int] = None,
+ page_token: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListOperationsPager:
+ r"""Lists operations that match the specified filter in the request.
+ If the server doesn't support this method, it returns
+ ``UNIMPLEMENTED``.
+
+ NOTE: the ``name`` binding allows API services to override the
+ binding to use different resource name schemes, such as
+ ``users/*/operations``. To override the binding, API services
+ can add a binding such as ``"/v1/{name=users/*}/operations"`` to
+ their service configuration. For backwards compatibility, the
+        default name includes the operations collection id; however,
+ overriding users must ensure the name binding is the parent
+ resource, without the operations collection id.
+
+ Args:
+ name (str):
+ The name of the operation's parent
+ resource.
+            filter_ (str):
+                The standard list filter.
+            page_size (int):
+                The standard list page size.
+            page_token (str):
+                The standard list page token.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+            timeout (float): The timeout for this request.
+            compression (grpc.Compression): An element of
+                :class:`grpc.Compression`, e.g. ``grpc.Compression.Gzip``.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
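+
+        Example:
+            A minimal sketch, assuming ``client`` is a concrete subclass of
+            this class (for example a REST-backed operations client):
+
+            >>> pager = client.list_operations(name="operations", filter_="")
+            >>> for operation in pager:
+            >>>     # additional pages are fetched on demand
+            >>>     pass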
+
+ Returns:
+ google.api_core.operations_v1.pagers.ListOperationsPager:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create a protobuf request object.
+ request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+ if page_size is not None:
+ request.page_size = page_size
+ if page_token is not None:
+ request.page_token = page_token
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_operations]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListOperationsPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_operation(
+ self,
+ name: str,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Gets the latest state of a long-running operation.
+ Clients can use this method to poll the operation result
+ at intervals as recommended by the API service.
+
+ Args:
+ name (str):
+ The name of the operation resource.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+            timeout (float): The timeout for this request.
+            compression (grpc.Compression): An element of
+                :class:`grpc.Compression`, e.g. ``grpc.Compression.Gzip``.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
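+
+        Example:
+            A minimal sketch, assuming ``client`` is a concrete subclass of
+            this class; the operation name is illustrative:
+
+            >>> operation = client.get_operation("operations/sample-op")
+            >>> operation.done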
+
+ Returns:
+ google.longrunning.operations_pb2.Operation:
+                This resource represents a long-running
+                operation that is the result of a network
+                API call.
+
+ """
+        # Create the request object.
+        request = operations_pb2.GetOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_operation(
+ self,
+ name: str,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a long-running operation. This method indicates that the
+ client is no longer interested in the operation result. It does
+ not cancel the operation. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be deleted.
+
+                This corresponds to the ``name`` field
+                on the underlying ``DeleteOperationRequest``.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+            timeout (float): The timeout for this request.
+            compression (grpc.Compression): An element of
+                :class:`grpc.Compression`, e.g. ``grpc.Compression.Gzip``.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
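+
+        Example:
+            A minimal sketch, assuming ``client`` is a concrete subclass of
+            this class; the operation name is illustrative:
+
+            >>> client.delete_operation("operations/sample-op")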
+ """
+ # Create the request object.
+ request = operations_pb2.DeleteOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
+
+ def cancel_operation(
+ self,
+ name: Optional[str] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Starts asynchronous cancellation on a long-running operation.
+ The server makes a best effort to cancel the operation, but
+ success is not guaranteed. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``. Clients
+ can use
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation]
+ or other methods to check whether the cancellation succeeded or
+ whether the operation completed despite cancellation. On
+ successful cancellation, the operation is not deleted; instead,
+ it becomes an operation with an
+ [Operation.error][google.api_core.operations_v1.Operation.error] value with
+ a [google.rpc.Status.code][google.rpc.Status.code] of 1,
+ corresponding to ``Code.CANCELLED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be cancelled.
+
+                This corresponds to the ``name`` field
+                on the underlying ``CancelOperationRequest``.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+            timeout (float): The timeout for this request.
+            compression (grpc.Compression): An element of
+                :class:`grpc.Compression`, e.g. ``grpc.Compression.Gzip``.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
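+
+        Example:
+            A minimal sketch, assuming ``client`` is a concrete subclass of
+            this class; the operation name is illustrative:
+
+            >>> client.cancel_operation("operations/sample-op")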
+ """
+ # Create the request object.
+ request = operations_pb2.CancelOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
diff --git a/Lib/site-packages/google/api_core/operations_v1/operations_async_client.py b/Lib/site-packages/google/api_core/operations_v1/operations_async_client.py
new file mode 100644
index 0000000..a60c717
--- /dev/null
+++ b/Lib/site-packages/google/api_core/operations_v1/operations_async_client.py
@@ -0,0 +1,364 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""An async client for the google.longrunning.operations meta-API.
+
+.. _Google API Style Guide:
+    https://cloud.google.com/apis/design/design_patterns#long_running_operations
+.. _google/longrunning/operations.proto:
+    https://github.com/googleapis/googleapis/blob/master/google/longrunning/operations.proto
+"""
+
+import functools
+
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1, page_iterator_async
+from google.api_core import retry_async as retries
+from google.api_core import timeout as timeouts
+from google.longrunning import operations_pb2
+from grpc import Compression
+
+
+class OperationsAsyncClient:
+ """Async client for interacting with long-running operations.
+
+ Args:
+ channel (aio.Channel): The gRPC AsyncIO channel associated with the
+ service that implements the ``google.longrunning.operations``
+ interface.
+        client_config (dict):
+            A dictionary of call options for each method. If not specified,
+            the default configuration is used.
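+
+    Example:
+        A minimal sketch using a gRPC AsyncIO channel; the target shown is
+        illustrative:
+
+        >>> import grpc
+        >>> channel = grpc.aio.insecure_channel("localhost:8500")
+        >>> client = OperationsAsyncClient(channel)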
+ """
+
+ def __init__(self, channel, client_config=None):
+ # Create the gRPC client stub with gRPC AsyncIO channel.
+ self.operations_stub = operations_pb2.OperationsStub(channel)
+
+ default_retry = retries.AsyncRetry(
+ initial=0.1, # seconds
+ maximum=60.0, # seconds
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
+ ),
+ timeout=600.0, # seconds
+ )
+ default_timeout = timeouts.TimeToDeadlineTimeout(timeout=600.0)
+
+ default_compression = Compression.NoCompression
+
+ self._get_operation = gapic_v1.method_async.wrap_method(
+ self.operations_stub.GetOperation,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
+ )
+
+ self._list_operations = gapic_v1.method_async.wrap_method(
+ self.operations_stub.ListOperations,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
+ )
+
+ self._cancel_operation = gapic_v1.method_async.wrap_method(
+ self.operations_stub.CancelOperation,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
+ )
+
+ self._delete_operation = gapic_v1.method_async.wrap_method(
+ self.operations_stub.DeleteOperation,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
+ )
+
+ async def get_operation(
+ self,
+ name,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
+ metadata=None,
+ ):
+ """Gets the latest state of a long-running operation.
+
+ Clients can use this method to poll the operation result at intervals
+ as recommended by the API service.
+
+ Example:
+ >>> from google.api_core import operations_v1
+            >>> api = operations_v1.OperationsAsyncClient(channel)
+ >>> name = ''
+ >>> response = await api.get_operation(name)
+
+ Args:
+ name (str): The name of the operation resource.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+                configuration is used. If ``None``, then the RPC method will
+                not time out.
+            compression (grpc.Compression): An element of
+                :class:`grpc.Compression`, e.g. ``grpc.Compression.Gzip``.
+ metadata (Optional[List[Tuple[str, str]]]):
+ Additional gRPC metadata.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation: The state of the
+ operation.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ request = operations_pb2.GetOperationRequest(name=name)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ return await self._get_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
+
+ async def list_operations(
+ self,
+ name,
+ filter_,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Lists operations that match the specified filter in the request.
+
+ Example:
+            >>> from google.api_core import operations_v1
+            >>> api = operations_v1.OperationsAsyncClient(channel)
+            >>> name = ''
+            >>>
+            >>> # Iterate over all results
+            >>> async for operation in await api.list_operations(name):
+            >>>     # process operation
+            >>>     pass
+            >>>
+            >>> # Or iterate over results one page at a time
+            >>> iterator = await api.list_operations(name)
+            >>> async for page in iterator.pages:
+            >>>     for operation in page:
+            >>>         # process operation
+            >>>         pass
+
+ Args:
+ name (str): The name of the operation collection.
+ filter_ (str): The standard list filter.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+                configuration is used. If ``None``, then the RPC method will
+                not time out.
+            compression (grpc.Compression): An element of
+                :class:`grpc.Compression`, e.g. ``grpc.Compression.Gzip``.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
+
+ Returns:
+            google.api_core.page_iterator_async.AsyncIterator: An async iterator
+                that yields :class:`google.longrunning.operations_pb2.Operation`
+                instances.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ # Create the method used to fetch pages
+ method = functools.partial(
+ self._list_operations,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
+
+ iterator = page_iterator_async.AsyncGRPCIterator(
+ client=None,
+ method=method,
+ request=request,
+ items_field="operations",
+ request_token_field="page_token",
+ response_token_field="next_page_token",
+ )
+
+ return iterator
+
+ async def cancel_operation(
+ self,
+ name,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
+ metadata=None,
+ ):
+ """Starts asynchronous cancellation on a long-running operation.
+
+ The server makes a best effort to cancel the operation, but success is
+        not guaranteed. Clients can use :meth:`get_operation` or
+        service-specific methods to check whether the cancellation succeeded or whether
+ the operation completed despite cancellation. On successful
+ cancellation, the operation is not deleted; instead, it becomes an
+ operation with an ``Operation.error`` value with a
+ ``google.rpc.Status.code`` of ``1``, corresponding to
+ ``Code.CANCELLED``.
+
+ Example:
+            >>> from google.api_core import operations_v1
+            >>> api = operations_v1.OperationsAsyncClient(channel)
+            >>> name = ''
+            >>> await api.cancel_operation(name)
+
+ Args:
+ name (str): The name of the operation resource to be cancelled.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+                configuration is used. If ``None``, then the RPC method will
+                not time out.
+            compression (grpc.Compression): An element of
+                :class:`grpc.Compression`, e.g. ``grpc.Compression.Gzip``.
+            metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+                metadata.
+
+        Raises:
+            google.api_core.exceptions.MethodNotImplemented: If the server
+                does not support this method. Services are not required to
+                implement this method.
+            google.api_core.exceptions.GoogleAPICallError: If an error occurred
+                while invoking the RPC, the appropriate ``GoogleAPICallError``
+                subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.CancelOperationRequest(name=name)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ await self._cancel_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
+
+ async def delete_operation(
+ self,
+ name,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
+ metadata=None,
+ ):
+ """Deletes a long-running operation.
+
+ This method indicates that the client is no longer interested in the
+ operation result. It does not cancel the operation.
+
+ Example:
+            >>> from google.api_core import operations_v1
+            >>> api = operations_v1.OperationsAsyncClient(channel)
+            >>> name = ''
+            >>> await api.delete_operation(name)
+
+ Args:
+ name (str): The name of the operation resource to be deleted.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+                configuration is used. If ``None``, then the RPC method will
+                not time out.
+            compression (grpc.Compression): An element of
+                :class:`grpc.Compression`, e.g. ``grpc.Compression.Gzip``.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.DeleteOperationRequest(name=name)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ await self._delete_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
diff --git a/Lib/site-packages/google/api_core/operations_v1/operations_client.py b/Lib/site-packages/google/api_core/operations_v1/operations_client.py
new file mode 100644
index 0000000..d1d3fd5
--- /dev/null
+++ b/Lib/site-packages/google/api_core/operations_v1/operations_client.py
@@ -0,0 +1,378 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A client for the google.longrunning.operations meta-API.
+
+This is a client that deals with long-running operations that follow the
+pattern outlined by the `Google API Style Guide`_.
+
+When an API method normally takes a long time to complete, it can be designed
+to return ``Operation`` to the client, and the client can use this interface
+to receive the real response asynchronously by polling the operation resource.
+
+It is not a separate service, but rather an interface implemented by a larger
+service. The protocol-level definition is available at
+`google/longrunning/operations.proto`_. Typically, this will be constructed
+automatically by another client class to deal with operations.
+
+.. _Google API Style Guide:
+    https://cloud.google.com/apis/design/design_patterns#long_running_operations
+.. _google/longrunning/operations.proto:
+    https://github.com/googleapis/googleapis/blob/master/google/longrunning/operations.proto
+"""
+
+import functools
+
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import page_iterator
+from google.api_core import retry as retries
+from google.api_core import timeout as timeouts
+from google.longrunning import operations_pb2
+from grpc import Compression
+
+
+class OperationsClient(object):
+ """Client for interacting with long-running operations within a service.
+
+ Args:
+ channel (grpc.Channel): The gRPC channel associated with the service
+ that implements the ``google.longrunning.operations`` interface.
+        client_config (dict):
+            A dictionary of call options for each method. If not specified,
+            the default configuration is used.
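+
+    Example:
+        A minimal sketch using an insecure gRPC channel; the target shown is
+        illustrative:
+
+        >>> import grpc
+        >>> channel = grpc.insecure_channel("localhost:8500")
+        >>> client = OperationsClient(channel)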
+ """
+
+ def __init__(self, channel, client_config=None):
+ # Create the gRPC client stub.
+ self.operations_stub = operations_pb2.OperationsStub(channel)
+
+ default_retry = retries.Retry(
+ initial=0.1, # seconds
+ maximum=60.0, # seconds
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
+ ),
+ timeout=600.0, # seconds
+ )
+ default_timeout = timeouts.TimeToDeadlineTimeout(timeout=600.0)
+
+ default_compression = Compression.NoCompression
+
+ self._get_operation = gapic_v1.method.wrap_method(
+ self.operations_stub.GetOperation,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
+ )
+
+ self._list_operations = gapic_v1.method.wrap_method(
+ self.operations_stub.ListOperations,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
+ )
+
+ self._cancel_operation = gapic_v1.method.wrap_method(
+ self.operations_stub.CancelOperation,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
+ )
+
+ self._delete_operation = gapic_v1.method.wrap_method(
+ self.operations_stub.DeleteOperation,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
+ )
+
+ # Service calls
+ def get_operation(
+ self,
+ name,
+ retry=gapic_v1.method.DEFAULT,
+ timeout=gapic_v1.method.DEFAULT,
+ compression=gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """Gets the latest state of a long-running operation.
+
+ Clients can use this method to poll the operation result at intervals
+ as recommended by the API service.
+
+ Example:
+ >>> from google.api_core import operations_v1
+            >>> api = operations_v1.OperationsClient(channel)
+ >>> name = ''
+ >>> response = api.get_operation(name)
+
+ Args:
+ name (str): The name of the operation resource.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+                configuration is used. If ``None``, then the RPC method will
+                not time out.
+            compression (grpc.Compression): An element of
+                :class:`grpc.Compression`, e.g. ``grpc.Compression.Gzip``.
+ metadata (Optional[List[Tuple[str, str]]]):
+ Additional gRPC metadata.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation: The state of the
+ operation.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ request = operations_pb2.GetOperationRequest(name=name)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ return self._get_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
+
+ def list_operations(
+ self,
+ name,
+ filter_,
+ retry=gapic_v1.method.DEFAULT,
+ timeout=gapic_v1.method.DEFAULT,
+ compression=gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Lists operations that match the specified filter in the request.
+
+ Example:
+ >>> from google.api_core import operations_v1
+            >>> api = operations_v1.OperationsClient(channel)
+ >>> name = ''
+ >>>
+ >>> # Iterate over all results
+ >>> for operation in api.list_operations(name):
+ >>> # process operation
+ >>> pass
+ >>>
+ >>> # Or iterate over results one page at a time
+            >>> iterator = api.list_operations(name)
+            >>> for page in iterator.pages:
+ >>> for operation in page:
+ >>> # process operation
+ >>> pass
+
+ Args:
+ name (str): The name of the operation collection.
+ filter_ (str): The standard list filter.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+                configuration is used. If ``None``, then the RPC method will
+                not time out.
+            compression (grpc.Compression): An element of
+                :class:`grpc.Compression`, e.g. ``grpc.Compression.Gzip``.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
+
+ Returns:
+ google.api_core.page_iterator.Iterator: An iterator that yields
+ :class:`google.longrunning.operations_pb2.Operation` instances.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ # Create the method used to fetch pages
+ method = functools.partial(
+ self._list_operations,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
+
+ iterator = page_iterator.GRPCIterator(
+ client=None,
+ method=method,
+ request=request,
+ items_field="operations",
+ request_token_field="page_token",
+ response_token_field="next_page_token",
+ )
+
+ return iterator
+
+ def cancel_operation(
+ self,
+ name,
+ retry=gapic_v1.method.DEFAULT,
+ timeout=gapic_v1.method.DEFAULT,
+ compression=gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """Starts asynchronous cancellation on a long-running operation.
+
+ The server makes a best effort to cancel the operation, but success is
+        not guaranteed. Clients can use :meth:`get_operation` or
+        service-specific methods to check whether the cancellation succeeded or whether
+ the operation completed despite cancellation. On successful
+ cancellation, the operation is not deleted; instead, it becomes an
+ operation with an ``Operation.error`` value with a
+ ``google.rpc.Status.code`` of ``1``, corresponding to
+ ``Code.CANCELLED``.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsClient()
+            >>> api = operations_v1.OperationsClient(channel)
+ >>> api.cancel_operation(name)
+
+ Args:
+ name (str): The name of the operation resource to be cancelled.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+                configuration is used. If ``None``, then the RPC method will
+                not time out.
+            compression (grpc.Compression): An element of
+                :class:`grpc.Compression`, e.g. ``grpc.Compression.Gzip``.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.CancelOperationRequest(name=name)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ self._cancel_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
+
+ def delete_operation(
+ self,
+ name,
+ retry=gapic_v1.method.DEFAULT,
+ timeout=gapic_v1.method.DEFAULT,
+ compression=gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """Deletes a long-running operation.
+
+ This method indicates that the client is no longer interested in the
+ operation result. It does not cancel the operation.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsClient()
+            >>> api = operations_v1.OperationsClient(channel)
+ >>> api.delete_operation(name)
+
+ Args:
+ name (str): The name of the operation resource to be deleted.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+                configuration is used. If ``None``, then the RPC method will
+                not time out.
+            compression (grpc.Compression): An element of
+                :class:`grpc.Compression`, e.g. ``grpc.Compression.Gzip``.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.DeleteOperationRequest(name=name)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ self._delete_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
diff --git a/Lib/site-packages/google/api_core/operations_v1/operations_client_config.py b/Lib/site-packages/google/api_core/operations_v1/operations_client_config.py
new file mode 100644
index 0000000..3ad3548
--- /dev/null
+++ b/Lib/site-packages/google/api_core/operations_v1/operations_client_config.py
@@ -0,0 +1,60 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""gapic configuration for the google.longrunning.operations client."""
+
+# DEPRECATED: retry and timeout classes are instantiated directly
+config = {
+ "interfaces": {
+ "google.longrunning.Operations": {
+ "retry_codes": {
+ "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+ "non_idempotent": [],
+ },
+ "retry_params": {
+ "default": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 20000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ }
+ },
+ "methods": {
+ "GetOperation": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "ListOperations": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "CancelOperation": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "DeleteOperation": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ },
+ }
+ }
+}
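+
+# A rough sketch of how the millisecond values above map onto a
+# google.api_core.retry.Retry object (the modern replacement noted in the
+# deprecation comment):
+#
+#   from google.api_core import retry as retries
+#   default_retry = retries.Retry(
+#       initial=0.1,     # initial_retry_delay_millis / 1000
+#       multiplier=1.3,  # retry_delay_multiplier
+#       maximum=60.0,    # max_retry_delay_millis / 1000
+#       timeout=600.0,   # total_timeout_millis / 1000
+#   )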
diff --git a/Lib/site-packages/google/api_core/operations_v1/pagers.py b/Lib/site-packages/google/api_core/operations_v1/pagers.py
new file mode 100644
index 0000000..b8a4775
--- /dev/null
+++ b/Lib/site-packages/google/api_core/operations_v1/pagers.py
@@ -0,0 +1,86 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import (
+ Any,
+ Callable,
+ Iterator,
+ Sequence,
+ Tuple,
+)
+
+from google.longrunning import operations_pb2
+
+
+class ListOperationsPager:
+ """A pager for iterating through ``list_operations`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``operations`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListOperations`` requests and continue to iterate
+ through the ``operations`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
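+
+    Example:
+        A minimal sketch, assuming ``client`` is an operations client whose
+        ``list_operations`` returns this pager:
+
+        >>> pager = client.list_operations(name="operations")
+        >>> for page in pager.pages:
+        ...     for operation in page.operations:
+        ...         print(operation.name)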
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., operations_pb2.ListOperationsResponse],
+ request: operations_pb2.ListOperationsRequest,
+ response: operations_pb2.ListOperationsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.longrunning.operations_pb2.ListOperationsRequest):
+ The initial request object.
+ response (google.longrunning.operations_pb2.ListOperationsResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = request
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ def pages(self) -> Iterator[operations_pb2.ListOperationsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __iter__(self) -> Iterator[operations_pb2.Operation]:
+ for page in self.pages:
+ yield from page.operations
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/Lib/site-packages/google/api_core/operations_v1/transports/__init__.py b/Lib/site-packages/google/api_core/operations_v1/transports/__init__.py
new file mode 100644
index 0000000..df53e15
--- /dev/null
+++ b/Lib/site-packages/google/api_core/operations_v1/transports/__init__.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+
+from .base import OperationsTransport
+from .rest import OperationsRestTransport
+
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict()
+_transport_registry["rest"] = OperationsRestTransport
+
+__all__ = (
+ "OperationsTransport",
+ "OperationsRestTransport",
+)
diff --git a/Lib/site-packages/google/api_core/operations_v1/transports/base.py b/Lib/site-packages/google/api_core/operations_v1/transports/base.py
new file mode 100644
index 0000000..98cf789
--- /dev/null
+++ b/Lib/site-packages/google/api_core/operations_v1/transports/base.py
@@ -0,0 +1,232 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Optional, Sequence, Union
+
+import google.api_core # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.api_core import version
+import google.auth # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+from grpc import Compression
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=version.__version__,
+)
+
+
+class OperationsTransport(abc.ABC):
+ """Abstract transport class for Operations."""
+
+ AUTH_SCOPES = ()
+
+ DEFAULT_HOST: str = "longrunning.googleapis.com"
+
+ def __init__(
+ self,
+ *,
+ host: str = DEFAULT_HOST,
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ **kwargs,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT
+                should be used for service account credentials.
+ """
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+ # Save the scopes.
+ self._scopes = scopes
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
+ )
+
+        # If the credentials are service account credentials, always try to use a self-signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.list_operations: gapic_v1.method.wrap_method(
+ self.list_operations,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ default_compression=Compression.NoCompression,
+ client_info=client_info,
+ ),
+ self.get_operation: gapic_v1.method.wrap_method(
+ self.get_operation,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ default_compression=Compression.NoCompression,
+ client_info=client_info,
+ ),
+ self.delete_operation: gapic_v1.method.wrap_method(
+ self.delete_operation,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ default_compression=Compression.NoCompression,
+ client_info=client_info,
+ ),
+ self.cancel_operation: gapic_v1.method.wrap_method(
+ self.cancel_operation,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ default_compression=Compression.NoCompression,
+ client_info=client_info,
+ ),
+ }
+
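+    # Sketch of how the table above is consumed: a concrete client looks up
+    # the wrapped callable via the transport's method property and invokes
+    # it (see AbstractOperationsClient), e.g.
+    #
+    #   rpc = transport._wrapped_methods[transport.list_operations]
+    #   response = rpc(request, retry=retry, timeout=timeout)
+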
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
+ """
+ raise NotImplementedError()
+
+ @property
+ def list_operations(
+ self,
+ ) -> Callable[
+ [operations_pb2.ListOperationsRequest],
+ Union[
+ operations_pb2.ListOperationsResponse,
+ Awaitable[operations_pb2.ListOperationsResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def get_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.GetOperationRequest],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def delete_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.DeleteOperationRequest],
+ Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def cancel_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.CancelOperationRequest],
+ Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
+ ]:
+ raise NotImplementedError()
+
+
+__all__ = ("OperationsTransport",)
diff --git a/Lib/site-packages/google/api_core/operations_v1/transports/rest.py b/Lib/site-packages/google/api_core/operations_v1/transports/rest.py
new file mode 100644
index 0000000..49f99d2
--- /dev/null
+++ b/Lib/site-packages/google/api_core/operations_v1/transports/rest.py
@@ -0,0 +1,488 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from requests import __version__ as requests_version
+
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import path_template # type: ignore
+from google.api_core import rest_helpers # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.requests import AuthorizedSession # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+from google.protobuf import json_format # type: ignore
+import grpc
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, OperationsTransport
+
+OptionalRetry = Union[retries.Retry, object]
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+ grpc_version=None,
+ rest_version=requests_version,
+)
+
+
+class OperationsRestTransport(OperationsTransport):
+ """REST backend transport for Operations.
+
+ Manages long-running operations with an API service.
+
+    When an API method normally takes a long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends JSON representations of protocol buffers over HTTP/1.1
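+
+    Example:
+        A minimal sketch; with no explicit credentials the transport falls
+        back to Application Default Credentials::
+
+            transport = OperationsRestTransport(
+                host="longrunning.googleapis.com",
+            )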
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "longrunning.googleapis.com",
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ http_options: Optional[Dict] = None,
+ path_prefix: str = "v1",
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with ``credentials``.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure the mutual TLS HTTP channel.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT
+                should be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ http_options: a dictionary of http_options for transcoding, to override
+ the defaults from operations.proto. Each method has an entry
+ with the corresponding http rules as value.
+ path_prefix: path prefix (usually represents API version). Set to
+ "v1" by default.
+
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+        maybe_url_match = re.match(r"^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
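+        # e.g. "longrunning.googleapis.com" becomes
+        # "https://longrunning.googleapis.com", while a host that already
+        # includes a scheme (e.g. "http://localhost:8080") is left unchanged.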
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._prep_wrapped_messages(client_info)
+ self._http_options = http_options or {}
+ self._path_prefix = path_prefix
+
+ def _list_operations(
+ self,
+ request: operations_pb2.ListOperationsRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.ListOperationsResponse:
+ r"""Call the list operations method over HTTP.
+
+ Args:
+ request (~.operations_pb2.ListOperationsRequest):
+ The request object. The request message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.ListOperationsResponse:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ """
+
+ http_options = [
+ {
+ "method": "get",
+ "uri": "/{}/{{name=**}}/operations".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.ListOperations" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.ListOperations"
+ ]
+
+ request_kwargs = json_format.MessageToDict(
+ request,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True,
+ )
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.ListOperationsRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ api_response = operations_pb2.ListOperationsResponse()
+ json_format.Parse(response.content, api_response, ignore_unknown_fields=False)
+ return api_response
+
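+    # Sketch of overriding the default transcoding rule above through the
+    # ``http_options`` constructor argument; the v2 URI is hypothetical:
+    #
+    #   transport = OperationsRestTransport(http_options={
+    #       "google.longrunning.Operations.ListOperations": [
+    #           {"method": "get", "uri": "/v2/{name=**}/operations"},
+    #       ],
+    #   })
+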
+ def _get_operation(
+ self,
+ request: operations_pb2.GetOperationRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Call the get operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.GetOperationRequest):
+ The request object. The request message for
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.Operation:
+                This resource represents a long-running
+                operation that is the result of a network
+                API call.
+
+ """
+
+ http_options = [
+ {
+ "method": "get",
+ "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.GetOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.GetOperation"
+ ]
+
+ request_kwargs = json_format.MessageToDict(
+ request,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True,
+ )
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.GetOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ api_response = operations_pb2.Operation()
+ json_format.Parse(response.content, api_response, ignore_unknown_fields=False)
+ return api_response
+
+ def _delete_operation(
+ self,
+ request: operations_pb2.DeleteOperationRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> empty_pb2.Empty:
+ r"""Call the delete operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.DeleteOperationRequest):
+ The request object. The request message for
+ [Operations.DeleteOperation][google.api_core.operations_v1.Operations.DeleteOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {
+ "method": "delete",
+ "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.DeleteOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.DeleteOperation"
+ ]
+
+ request_kwargs = json_format.MessageToDict(
+ request,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True,
+ )
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.DeleteOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ return empty_pb2.Empty()
+
+ def _cancel_operation(
+ self,
+ request: operations_pb2.CancelOperationRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> empty_pb2.Empty:
+ r"""Call the cancel operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.CancelOperationRequest):
+ The request object. The request message for
+ [Operations.CancelOperation][google.api_core.operations_v1.Operations.CancelOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {
+ "method": "post",
+ "uri": "/{}/{{name=**/operations/*}}:cancel".format(self._path_prefix),
+ "body": "*",
+ },
+ ]
+ if "google.longrunning.Operations.CancelOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.CancelOperation"
+ ]
+
+ request_kwargs = json_format.MessageToDict(
+ request,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True,
+ )
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ # Jsonify the request body
+ body_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["body"], body_request)
+ body = json_format.MessageToDict(
+ body_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ data=body,
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ return empty_pb2.Empty()
+
+ @property
+ def list_operations(
+ self,
+ ) -> Callable[
+ [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+ ]:
+ return self._list_operations
+
+ @property
+ def get_operation(
+ self,
+ ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+ return self._get_operation
+
+ @property
+ def delete_operation(
+ self,
+ ) -> Callable[[operations_pb2.DeleteOperationRequest], empty_pb2.Empty]:
+ return self._delete_operation
+
+ @property
+ def cancel_operation(
+ self,
+ ) -> Callable[[operations_pb2.CancelOperationRequest], empty_pb2.Empty]:
+ return self._cancel_operation
+
+
+__all__ = ("OperationsRestTransport",)
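+
+# Usage sketch (illustrative): calling the transport directly with protobuf
+# requests. The host and operation name are hypothetical placeholders, and
+# construction assumes default credentials can be resolved by google.auth.
+#
+#   from google.longrunning import operations_pb2
+#
+#   transport = OperationsRestTransport(host="example.googleapis.com")
+#   request = operations_pb2.GetOperationRequest(name="projects/p/operations/op1")
+#   operation = transport.get_operation(request)  # issues a GET over HTTP/JSON
+#   if operation.done:
+#       print("finished")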
diff --git a/Lib/site-packages/google/api_core/page_iterator.py b/Lib/site-packages/google/api_core/page_iterator.py
new file mode 100644
index 0000000..23761ec
--- /dev/null
+++ b/Lib/site-packages/google/api_core/page_iterator.py
@@ -0,0 +1,571 @@
+# Copyright 2015 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Iterators for paging through paged API methods.
+
+These iterators simplify the process of paging through API responses
+where the request takes a page token and the response is a list of results with
+a token for the next page. See `list pagination`_ in the Google API Style Guide
+for more details.
+
+.. _list pagination:
+ https://cloud.google.com/apis/design/design_patterns#list_pagination
+
+API clients that have methods that follow the list pagination pattern can
+return an :class:`.Iterator`. You can use this iterator to get **all** of
+the results across all pages::
+
+    >>> results_iterator = client.list_resources()
+    >>> list(results_iterator)  # Convert to a list (consumes all values).
+
+Or you can walk your way through items and call off the search early if
+you find what you're looking for (resulting in possibly fewer requests)::
+
+    >>> for resource in results_iterator:
+    ...     print(resource.name)
+    ...     if not resource.is_valid:
+    ...         break
+
+At any point, you may check the number of items consumed by referencing the
+``num_results`` property of the iterator::
+
+    >>> for my_item in results_iterator:
+    ...     if results_iterator.num_results >= 10:
+    ...         break
+
+When iterating, not every new item will send a request to the server.
+To iterate based on each page of items (where a page corresponds to
+a request)::
+
+    >>> for page in results_iterator.pages:
+    ...     print('=' * 20)
+    ...     print('   Page number: {:d}'.format(results_iterator.page_number))
+    ...     print('  Items in page: {:d}'.format(page.num_items))
+    ...     print('     First item: {!r}'.format(next(page)))
+    ...     print('Items remaining: {:d}'.format(page.remaining))
+    ...     print('Next page token: {}'.format(results_iterator.next_page_token))
+    ====================
+       Page number: 1
+      Items in page: 1
+         First item: <MyItemClass at 0x7f1d3cccf690>
+    Items remaining: 0
+    Next page token: eav1OzQB0OM8rLdGXOEsyQWSG
+    ====================
+       Page number: 2
+      Items in page: 19
+         First item: <MyItemClass at 0x7f1d3cccffd0>
+    Items remaining: 18
+    Next page token: None
+
+Then, for each page you can get all the resources on that page by iterating
+through it or using :func:`list`::
+
+    >>> list(page)
+    [
+        <MyItemClass at 0x7f1d3cccf690>,
+        <MyItemClass at 0x7f1d3cccf750>,
+        <MyItemClass at 0x7f1d3cccf790>,
+    ]
+"""
+
+import abc
+
+
+class Page(object):
+ """Single page of results in an iterator.
+
+ Args:
+ parent (google.api_core.page_iterator.Iterator): The iterator that owns
+ the current page.
+ items (Sequence[Any]): An iterable (that also defines __len__) of items
+ from a raw API response.
+ item_to_value (Callable[google.api_core.page_iterator.Iterator, Any]):
+ Callable to convert an item from the type in the raw API response
+ into the native object. Will be called with the iterator and a
+ single item.
+        raw_page (Optional[google.protobuf.message.Message]):
+            The raw page response.
+ """
+
+ def __init__(self, parent, items, item_to_value, raw_page=None):
+ self._parent = parent
+ self._num_items = len(items)
+ self._remaining = self._num_items
+ self._item_iter = iter(items)
+ self._item_to_value = item_to_value
+ self._raw_page = raw_page
+
+ @property
+ def raw_page(self):
+ """google.protobuf.message.Message"""
+ return self._raw_page
+
+ @property
+ def num_items(self):
+ """int: Total items in the page."""
+ return self._num_items
+
+ @property
+ def remaining(self):
+ """int: Remaining items in the page."""
+ return self._remaining
+
+ def __iter__(self):
+ """The :class:`Page` is an iterator of items."""
+ return self
+
+ def __next__(self):
+ """Get the next value in the page."""
+ item = next(self._item_iter)
+ result = self._item_to_value(self._parent, item)
+ # Since we've successfully got the next value from the
+ # iterator, we update the number of remaining.
+ self._remaining -= 1
+ return result
+
+
+def _item_to_value_identity(iterator, item):
+ """An item to value transformer that returns the item un-changed."""
+ # pylint: disable=unused-argument
+ # We are conforming to the interface defined by Iterator.
+ return item
+
+
+class Iterator(object, metaclass=abc.ABCMeta):
+ """A generic class for iterating through API list responses.
+
+ Args:
+ client(google.cloud.client.Client): The API client.
+ item_to_value (Callable[google.api_core.page_iterator.Iterator, Any]):
+ Callable to convert an item from the type in the raw API response
+ into the native object. Will be called with the iterator and a
+ single item.
+ page_token (str): A token identifying a page in a result set to start
+ fetching results from.
+ max_results (int): The maximum number of results to fetch.
+ """
+
+ def __init__(
+ self,
+ client,
+ item_to_value=_item_to_value_identity,
+ page_token=None,
+ max_results=None,
+ ):
+ self._started = False
+ self.__active_iterator = None
+
+ self.client = client
+ """Optional[Any]: The client that created this iterator."""
+        self.item_to_value = item_to_value
+        """Callable[Iterator, Any]: Callable to convert an item from the type
+        in the raw API response into the native object. Will be called with
+        the iterator and a single item.
+        """
+ self.max_results = max_results
+ """int: The maximum number of results to fetch"""
+
+ # The attributes below will change over the life of the iterator.
+ self.page_number = 0
+ """int: The current page of results."""
+ self.next_page_token = page_token
+ """str: The token for the next page of results. If this is set before
+ the iterator starts, it effectively offsets the iterator to a
+ specific starting point."""
+ self.num_results = 0
+ """int: The total number of results fetched so far."""
+
+ @property
+    def pages(self):
+        """Iterator of pages in the response.
+
+        Returns:
+            types.GeneratorType[google.api_core.page_iterator.Page]: A
+            generator of page instances.
+
+        Raises:
+            ValueError: If the iterator has already been started.
+ """
+ if self._started:
+ raise ValueError("Iterator has already started", self)
+ self._started = True
+ return self._page_iter(increment=True)
+
+ def _items_iter(self):
+ """Iterator for each item returned."""
+ for page in self._page_iter(increment=False):
+ for item in page:
+ self.num_results += 1
+ yield item
+
+ def __iter__(self):
+ """Iterator for each item returned.
+
+ Returns:
+ types.GeneratorType[Any]: A generator of items from the API.
+
+ Raises:
+ ValueError: If the iterator has already been started.
+ """
+ if self._started:
+ raise ValueError("Iterator has already started", self)
+ self._started = True
+ return self._items_iter()
+
+ def __next__(self):
+ if self.__active_iterator is None:
+ self.__active_iterator = iter(self)
+ return next(self.__active_iterator)
+
+ def _page_iter(self, increment):
+ """Generator of pages of API responses.
+
+ Args:
+ increment (bool): Flag indicating if the total number of results
+ should be incremented on each page. This is useful since a page
+ iterator will want to increment by results per page while an
+ items iterator will want to increment per item.
+
+ Yields:
+ Page: each page of items from the API.
+ """
+ page = self._next_page()
+ while page is not None:
+ self.page_number += 1
+ if increment:
+ self.num_results += page.num_items
+ yield page
+ page = self._next_page()
+
+ @abc.abstractmethod
+    def _next_page(self):
+        """Get the next page in the iterator.
+
+        This does nothing and is intended to be overridden by subclasses
+ to return the next :class:`Page`.
+
+ Raises:
+ NotImplementedError: Always, this method is abstract.
+ """
+ raise NotImplementedError
+
+
+def _do_nothing_page_start(iterator, page, response):
+ """Helper to provide custom behavior after a :class:`Page` is started.
+
+ This is a do-nothing stand-in as the default value.
+
+ Args:
+ iterator (Iterator): An iterator that holds some request info.
+ page (Page): The page that was just created.
+ response (Any): The API response for a page.
+ """
+ # pylint: disable=unused-argument
+ pass
+
+
+class HTTPIterator(Iterator):
+ """A generic class for iterating through HTTP/JSON API list responses.
+
+ To make an iterator work, you'll need to provide a way to convert a JSON
+ item returned from the API into the object of your choice (via
+ ``item_to_value``). You also may need to specify a custom ``items_key`` so
+ that a given response (containing a page of results) can be parsed into an
+ iterable page of the actual objects you want.
+
+ Args:
+ client (google.cloud.client.Client): The API client.
+ api_request (Callable): The function to use to make API requests.
+ Generally, this will be
+ :meth:`google.cloud._http.JSONConnection.api_request`.
+ path (str): The method path to query for the list of items.
+ item_to_value (Callable[google.api_core.page_iterator.Iterator, Any]):
+ Callable to convert an item from the type in the JSON response into
+ a native object. Will be called with the iterator and a single
+ item.
+ items_key (str): The key in the API response where the list of items
+ can be found.
+ page_token (str): A token identifying a page in a result set to start
+ fetching results from.
+        page_size (int): The maximum number of results to fetch per page.
+        max_results (int): The maximum number of results to fetch.
+ extra_params (dict): Extra query string parameters for the
+ API call.
+ page_start (Callable[
+ google.api_core.page_iterator.Iterator,
+ google.api_core.page_iterator.Page, dict]): Callable to provide
+ any special behavior after a new page has been created. Assumed
+ signature takes the :class:`.Iterator` that started the page,
+ the :class:`.Page` that was started and the dictionary containing
+ the page response.
+ next_token (str): The name of the field used in the response for page
+ tokens.
+
+ .. autoattribute:: pages
+ """
+
+ _DEFAULT_ITEMS_KEY = "items"
+ _PAGE_TOKEN = "pageToken"
+ _MAX_RESULTS = "maxResults"
+ _NEXT_TOKEN = "nextPageToken"
+ _RESERVED_PARAMS = frozenset([_PAGE_TOKEN])
+ _HTTP_METHOD = "GET"
+
+ def __init__(
+ self,
+ client,
+ api_request,
+ path,
+ item_to_value,
+ items_key=_DEFAULT_ITEMS_KEY,
+ page_token=None,
+ page_size=None,
+ max_results=None,
+ extra_params=None,
+ page_start=_do_nothing_page_start,
+ next_token=_NEXT_TOKEN,
+ ):
+ super(HTTPIterator, self).__init__(
+ client, item_to_value, page_token=page_token, max_results=max_results
+ )
+ self.api_request = api_request
+ self.path = path
+ self._items_key = items_key
+ self.extra_params = extra_params
+ self._page_size = page_size
+ self._page_start = page_start
+ self._next_token = next_token
+ # Verify inputs / provide defaults.
+ if self.extra_params is None:
+ self.extra_params = {}
+ self._verify_params()
+
+ def _verify_params(self):
+ """Verifies the parameters don't use any reserved parameter.
+
+ Raises:
+ ValueError: If a reserved parameter is used.
+ """
+ reserved_in_use = self._RESERVED_PARAMS.intersection(self.extra_params)
+ if reserved_in_use:
+ raise ValueError("Using a reserved parameter", reserved_in_use)
+
+ def _next_page(self):
+ """Get the next page in the iterator.
+
+ Returns:
+ Optional[Page]: The next page in the iterator or :data:`None` if
+ there are no pages left.
+ """
+ if self._has_next_page():
+ response = self._get_next_page_response()
+ items = response.get(self._items_key, ())
+ page = Page(self, items, self.item_to_value, raw_page=response)
+ self._page_start(self, page, response)
+ self.next_page_token = response.get(self._next_token)
+ return page
+ else:
+ return None
+
+ def _has_next_page(self):
+ """Determines whether or not there are more pages with results.
+
+ Returns:
+ bool: Whether the iterator has more pages.
+ """
+ if self.page_number == 0:
+ return True
+
+ if self.max_results is not None:
+ if self.num_results >= self.max_results:
+ return False
+
+ return self.next_page_token is not None
+
+ def _get_query_params(self):
+ """Getter for query parameters for the next request.
+
+ Returns:
+ dict: A dictionary of query parameters.
+ """
+ result = {}
+ if self.next_page_token is not None:
+ result[self._PAGE_TOKEN] = self.next_page_token
+
+ page_size = None
+ if self.max_results is not None:
+ page_size = self.max_results - self.num_results
+ if self._page_size is not None:
+ page_size = min(page_size, self._page_size)
+ elif self._page_size is not None:
+ page_size = self._page_size
+
+ if page_size is not None:
+ result[self._MAX_RESULTS] = page_size
+
+ result.update(self.extra_params)
+ return result
+
+ def _get_next_page_response(self):
+ """Requests the next page from the path provided.
+
+ Returns:
+ dict: The parsed JSON response of the next page's contents.
+
+ Raises:
+ ValueError: If the HTTP method is not ``GET`` or ``POST``.
+ """
+ params = self._get_query_params()
+ if self._HTTP_METHOD == "GET":
+ return self.api_request(
+ method=self._HTTP_METHOD, path=self.path, query_params=params
+ )
+ elif self._HTTP_METHOD == "POST":
+ return self.api_request(
+ method=self._HTTP_METHOD, path=self.path, data=params
+ )
+ else:
+ raise ValueError("Unexpected HTTP method", self._HTTP_METHOD)
+
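+# Usage sketch (illustrative): wiring HTTPIterator to a JSON API. The
+# ``api_request`` callable and the ``/widgets`` path are hypothetical; any
+# callable accepting these keyword arguments and returning the parsed JSON
+# dict will do.
+#
+#   def api_request(method, path, query_params=None, data=None):
+#       ...  # perform the HTTP call, return the response as a dict
+#
+#   iterator = HTTPIterator(
+#       client=None,
+#       api_request=api_request,
+#       path="/widgets",
+#       item_to_value=lambda parent, item: item["name"],
+#       items_key="widgets",
+#       page_size=50,
+#   )
+#   names = list(iterator)  # pages are fetched lazily during iteration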
+
+class _GAXIterator(Iterator):
+ """A generic class for iterating through Cloud gRPC APIs list responses.
+
+    Args:
+ client (google.cloud.client.Client): The API client.
+ page_iter (google.gax.PageIterator): A GAX page iterator to be wrapped
+ to conform to the :class:`Iterator` interface.
+ item_to_value (Callable[Iterator, Any]): Callable to convert an item
+ from the protobuf response into a native object. Will
+ be called with the iterator and a single item.
+ max_results (int): The maximum number of results to fetch.
+
+ .. autoattribute:: pages
+ """
+
+ def __init__(self, client, page_iter, item_to_value, max_results=None):
+ super(_GAXIterator, self).__init__(
+ client,
+ item_to_value,
+ page_token=page_iter.page_token,
+ max_results=max_results,
+ )
+ self._gax_page_iter = page_iter
+
+ def _next_page(self):
+ """Get the next page in the iterator.
+
+ Wraps the response from the :class:`~google.gax.PageIterator` in a
+ :class:`Page` instance and captures some state at each page.
+
+ Returns:
+ Optional[Page]: The next page in the iterator or :data:`None` if
+ there are no pages left.
+ """
+ try:
+ items = next(self._gax_page_iter)
+ page = Page(self, items, self.item_to_value)
+ self.next_page_token = self._gax_page_iter.page_token or None
+ return page
+ except StopIteration:
+ return None
+
+
+class GRPCIterator(Iterator):
+ """A generic class for iterating through gRPC list responses.
+
+ .. note:: The class does not take a ``page_token`` argument because it can
+ just be specified in the ``request``.
+
+ Args:
+        client (google.cloud.client.Client): The API client. This is unused
+            by this class, but kept to satisfy the :class:`Iterator` interface.
+ method (Callable[protobuf.Message]): A bound gRPC method that should
+ take a single message for the request.
+ request (protobuf.Message): The request message.
+ items_field (str): The field in the response message that has the
+ items for the page.
+        item_to_value (Callable[GRPCIterator, Any]): Callable to convert an
+            item from the type in the protobuf response into a native object.
+            Will be called with the iterator and a single item.
+ request_token_field (str): The field in the request message used to
+ specify the page token.
+ response_token_field (str): The field in the response message that has
+ the token for the next page.
+ max_results (int): The maximum number of results to fetch.
+
+ .. autoattribute:: pages
+ """
+
+ _DEFAULT_REQUEST_TOKEN_FIELD = "page_token"
+ _DEFAULT_RESPONSE_TOKEN_FIELD = "next_page_token"
+
+ def __init__(
+ self,
+ client,
+ method,
+ request,
+ items_field,
+ item_to_value=_item_to_value_identity,
+ request_token_field=_DEFAULT_REQUEST_TOKEN_FIELD,
+ response_token_field=_DEFAULT_RESPONSE_TOKEN_FIELD,
+ max_results=None,
+ ):
+ super(GRPCIterator, self).__init__(
+ client, item_to_value, max_results=max_results
+ )
+ self._method = method
+ self._request = request
+ self._items_field = items_field
+ self._request_token_field = request_token_field
+ self._response_token_field = response_token_field
+
+ def _next_page(self):
+ """Get the next page in the iterator.
+
+ Returns:
+            Optional[Page]: The next page in the iterator or :data:`None` if
+            there are no pages left.
+ """
+ if not self._has_next_page():
+ return None
+
+ if self.next_page_token is not None:
+ setattr(self._request, self._request_token_field, self.next_page_token)
+
+ response = self._method(self._request)
+
+ self.next_page_token = getattr(response, self._response_token_field)
+ items = getattr(response, self._items_field)
+ page = Page(self, items, self.item_to_value, raw_page=response)
+
+ return page
+
+ def _has_next_page(self):
+ """Determines whether or not there are more pages with results.
+
+ Returns:
+ bool: Whether the iterator has more pages.
+ """
+ if self.page_number == 0:
+ return True
+
+ if self.max_results is not None:
+ if self.num_results >= self.max_results:
+ return False
+
+ # Note: intentionally a falsy check instead of a None check. The RPC
+ # can return an empty string indicating no more pages.
+ return True if self.next_page_token else False
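+
+# Usage sketch (illustrative): adapting a paged gRPC method. The client,
+# request type and field names are hypothetical stand-ins for a generated
+# API surface whose response has ``books`` and ``next_page_token`` fields.
+#
+#   iterator = GRPCIterator(
+#       client=my_client,
+#       method=my_client.list_books,          # bound method taking the request
+#       request=ListBooksRequest(parent="shelves/1"),
+#       items_field="books",
+#   )
+#   for book in iterator:
+#       print(book.name)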
diff --git a/Lib/site-packages/google/api_core/page_iterator_async.py b/Lib/site-packages/google/api_core/page_iterator_async.py
new file mode 100644
index 0000000..c072575
--- /dev/null
+++ b/Lib/site-packages/google/api_core/page_iterator_async.py
@@ -0,0 +1,285 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AsyncIO iterators for paging through paged API methods.
+
+These iterators simplify the process of paging through API responses
+where the request takes a page token and the response is a list of results with
+a token for the next page. See `list pagination`_ in the Google API Style Guide
+for more details.
+
+.. _list pagination:
+ https://cloud.google.com/apis/design/design_patterns#list_pagination
+
+API clients that have methods that follow the list pagination pattern can
+return an :class:`.AsyncIterator`::
+
+    >>> results_iterator = await client.list_resources()
+
+Or you can walk your way through items and call off the search early if
+you find what you're looking for (resulting in possibly fewer requests)::
+
+    >>> async for resource in results_iterator:
+    ...     print(resource.name)
+    ...     if not resource.is_valid:
+    ...         break
+
+At any point, you may check the number of items consumed by referencing the
+``num_results`` property of the iterator::
+
+    >>> async for my_item in results_iterator:
+    ...     if results_iterator.num_results >= 10:
+    ...         break
+
+When iterating, not every new item will send a request to the server.
+To iterate based on each page of items (where a page corresponds to
+a request)::
+
+    >>> async for page in results_iterator.pages:
+    ...     print('=' * 20)
+    ...     print('   Page number: {:d}'.format(results_iterator.page_number))
+    ...     print('  Items in page: {:d}'.format(page.num_items))
+    ...     print('     First item: {!r}'.format(next(page)))
+    ...     print('Items remaining: {:d}'.format(page.remaining))
+    ...     print('Next page token: {}'.format(results_iterator.next_page_token))
+    ====================
+       Page number: 1
+      Items in page: 1
+         First item: <MyItemClass at 0x7f1d3cccf690>
+    Items remaining: 0
+    Next page token: eav1OzQB0OM8rLdGXOEsyQWSG
+    ====================
+       Page number: 2
+      Items in page: 19
+         First item: <MyItemClass at 0x7f1d3cccffd0>
+    Items remaining: 18
+    Next page token: None
+"""
+
+import abc
+
+from google.api_core.page_iterator import Page
+
+
+def _item_to_value_identity(iterator, item):
+ """An item to value transformer that returns the item un-changed."""
+ # pylint: disable=unused-argument
+ # We are conforming to the interface defined by Iterator.
+ return item
+
+
+class AsyncIterator(abc.ABC):
+ """A generic class for iterating through API list responses.
+
+ Args:
+ client(google.cloud.client.Client): The API client.
+ item_to_value (Callable[google.api_core.page_iterator_async.AsyncIterator, Any]):
+ Callable to convert an item from the type in the raw API response
+ into the native object. Will be called with the iterator and a
+ single item.
+ page_token (str): A token identifying a page in a result set to start
+ fetching results from.
+ max_results (int): The maximum number of results to fetch.
+ """
+
+ def __init__(
+ self,
+ client,
+ item_to_value=_item_to_value_identity,
+ page_token=None,
+ max_results=None,
+ ):
+ self._started = False
+ self.__active_aiterator = None
+
+ self.client = client
+ """Optional[Any]: The client that created this iterator."""
+        self.item_to_value = item_to_value
+        """Callable[Iterator, Any]: Callable to convert an item from the type
+        in the raw API response into the native object. Will be called with
+        the iterator and a single item.
+        """
+ self.max_results = max_results
+ """int: The maximum number of results to fetch."""
+
+ # The attributes below will change over the life of the iterator.
+ self.page_number = 0
+ """int: The current page of results."""
+ self.next_page_token = page_token
+ """str: The token for the next page of results. If this is set before
+ the iterator starts, it effectively offsets the iterator to a
+ specific starting point."""
+ self.num_results = 0
+ """int: The total number of results fetched so far."""
+
+ @property
+    def pages(self):
+        """Iterator of pages in the response.
+
+        Returns:
+            types.GeneratorType[google.api_core.page_iterator.Page]: A
+            generator of page instances.
+
+        Raises:
+            ValueError: If the iterator has already been started.
+ """
+ if self._started:
+ raise ValueError("Iterator has already started", self)
+ self._started = True
+ return self._page_aiter(increment=True)
+
+ async def _items_aiter(self):
+ """Iterator for each item returned."""
+ async for page in self._page_aiter(increment=False):
+ for item in page:
+ self.num_results += 1
+ yield item
+
+ def __aiter__(self):
+ """Iterator for each item returned.
+
+ Returns:
+ types.GeneratorType[Any]: A generator of items from the API.
+
+ Raises:
+ ValueError: If the iterator has already been started.
+ """
+ if self._started:
+ raise ValueError("Iterator has already started", self)
+ self._started = True
+ return self._items_aiter()
+
+ async def __anext__(self):
+ if self.__active_aiterator is None:
+ self.__active_aiterator = self.__aiter__()
+ return await self.__active_aiterator.__anext__()
+
+ async def _page_aiter(self, increment):
+ """Generator of pages of API responses.
+
+ Args:
+ increment (bool): Flag indicating if the total number of results
+ should be incremented on each page. This is useful since a page
+ iterator will want to increment by results per page while an
+ items iterator will want to increment per item.
+
+ Yields:
+ Page: each page of items from the API.
+ """
+ page = await self._next_page()
+ while page is not None:
+ self.page_number += 1
+ if increment:
+ self.num_results += page.num_items
+ yield page
+ page = await self._next_page()
+
+ @abc.abstractmethod
+    async def _next_page(self):
+        """Get the next page in the iterator.
+
+        This does nothing and is intended to be overridden by subclasses
+ to return the next :class:`Page`.
+
+ Raises:
+ NotImplementedError: Always, this method is abstract.
+ """
+ raise NotImplementedError
+
+
+class AsyncGRPCIterator(AsyncIterator):
+ """A generic class for iterating through gRPC list responses.
+
+ .. note:: The class does not take a ``page_token`` argument because it can
+ just be specified in the ``request``.
+
+ Args:
+        client (google.cloud.client.Client): The API client. This is unused
+            by this class, but kept to satisfy the :class:`Iterator` interface.
+ method (Callable[protobuf.Message]): A bound gRPC method that should
+ take a single message for the request.
+ request (protobuf.Message): The request message.
+ items_field (str): The field in the response message that has the
+ items for the page.
+        item_to_value (Callable[AsyncGRPCIterator, Any]): Callable to convert
+            an item from the type in the protobuf response into a native
+            object. Will be called with the iterator and a single item.
+ request_token_field (str): The field in the request message used to
+ specify the page token.
+ response_token_field (str): The field in the response message that has
+ the token for the next page.
+ max_results (int): The maximum number of results to fetch.
+
+ .. autoattribute:: pages
+ """
+
+ _DEFAULT_REQUEST_TOKEN_FIELD = "page_token"
+ _DEFAULT_RESPONSE_TOKEN_FIELD = "next_page_token"
+
+ def __init__(
+ self,
+ client,
+ method,
+ request,
+ items_field,
+ item_to_value=_item_to_value_identity,
+ request_token_field=_DEFAULT_REQUEST_TOKEN_FIELD,
+ response_token_field=_DEFAULT_RESPONSE_TOKEN_FIELD,
+ max_results=None,
+ ):
+ super().__init__(client, item_to_value, max_results=max_results)
+ self._method = method
+ self._request = request
+ self._items_field = items_field
+ self._request_token_field = request_token_field
+ self._response_token_field = response_token_field
+
+ async def _next_page(self):
+ """Get the next page in the iterator.
+
+ Returns:
+            Optional[Page]: The next page in the iterator or :data:`None` if
+            there are no pages left.
+ """
+ if not self._has_next_page():
+ return None
+
+ if self.next_page_token is not None:
+ setattr(self._request, self._request_token_field, self.next_page_token)
+
+ response = await self._method(self._request)
+
+ self.next_page_token = getattr(response, self._response_token_field)
+ items = getattr(response, self._items_field)
+ page = Page(self, items, self.item_to_value, raw_page=response)
+
+ return page
+
+ def _has_next_page(self):
+ """Determines whether or not there are more pages with results.
+
+ Returns:
+ bool: Whether the iterator has more pages.
+ """
+ if self.page_number == 0:
+ return True
+
+ # Note: intentionally a falsy check instead of a None check. The RPC
+ # can return an empty string indicating no more pages.
+ if self.max_results is not None:
+ if self.num_results >= self.max_results:
+ return False
+
+ return True if self.next_page_token else False
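+
+# Usage sketch (illustrative): the async counterpart of GRPCIterator. The
+# client and request below are hypothetical; ``list_books`` must be a
+# coroutine function returning the response message.
+#
+#   async def collect_book_names():
+#       iterator = AsyncGRPCIterator(
+#           client=my_async_client,
+#           method=my_async_client.list_books,
+#           request=ListBooksRequest(parent="shelves/1"),
+#           items_field="books",
+#       )
+#       return [book.name async for book in iterator]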
diff --git a/Lib/site-packages/google/api_core/path_template.py b/Lib/site-packages/google/api_core/path_template.py
new file mode 100644
index 0000000..b8ebb2a
--- /dev/null
+++ b/Lib/site-packages/google/api_core/path_template.py
@@ -0,0 +1,346 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Expand and validate URL path templates.
+
+This module provides the :func:`expand` and :func:`validate` functions for
+interacting with Google-style URL `path templates`_ which are commonly used
+in Google APIs for `resource names`_.
+
+.. _path templates: https://github.com/googleapis/googleapis/blob
+ /57e2d376ac7ef48681554204a3ba78a414f2c533/google/api/http.proto#L212
+.. _resource names: https://cloud.google.com/apis/design/resource_names
+"""
+
+from __future__ import unicode_literals
+
+from collections import deque
+import copy
+import functools
+import re
+
+# Regular expression for extracting variable parts from a path template.
+# The variables can be expressed as:
+#
+# - "*": a single-segment positional variable, for example: "books/*"
+# - "**": a multi-segment positional variable, for example: "shelf/**/book/*"
+# - "{name}": a single-segment wildcard named variable, for example
+# "books/{name}"
+# - "{name=*}: same as above.
+# - "{name=**}": a multi-segment wildcard named variable, for example
+# "shelf/{name=**}"
+# - "{name=/path/*/**}": a multi-segment named variable with a sub-template.
+_VARIABLE_RE = re.compile(
+    r"""
+    (  # Capture the entire variable expression
+        (?P<positional>\*\*?)  # Match & capture * and ** positional variables.
+        |
+        # Match & capture named variables {name}
+        {
+            (?P<name>[^/]+?)
+            # Optionally match and capture the named variable's template.
+            (?:=(?P<template>.+?))?
+        }
+    )
+    """,
+    re.VERBOSE,
+)
+
+# Segment expressions used for validating paths against a template.
+_SINGLE_SEGMENT_PATTERN = r"([^/]+)"
+_MULTI_SEGMENT_PATTERN = r"(.+)"
+
+
+def _expand_variable_match(positional_vars, named_vars, match):
+ """Expand a matched variable with its value.
+
+ Args:
+ positional_vars (list): A list of positional variables. This list will
+ be modified.
+ named_vars (dict): A dictionary of named variables.
+ match (re.Match): A regular expression match.
+
+ Returns:
+ str: The expanded variable to replace the match.
+
+ Raises:
+ ValueError: If a positional or named variable is required by the
+ template but not specified or if an unexpected template expression
+ is encountered.
+ """
+ positional = match.group("positional")
+ name = match.group("name")
+ if name is not None:
+ try:
+ return str(named_vars[name])
+ except KeyError:
+ raise ValueError(
+ "Named variable '{}' not specified and needed by template "
+ "`{}` at position {}".format(name, match.string, match.start())
+ )
+ elif positional is not None:
+ try:
+ return str(positional_vars.pop(0))
+ except IndexError:
+ raise ValueError(
+ "Positional variable not specified and needed by template "
+ "`{}` at position {}".format(match.string, match.start())
+ )
+ else:
+ raise ValueError("Unknown template expression {}".format(match.group(0)))
+
+
+def expand(tmpl, *args, **kwargs):
+    """Expand a path template with the given variables.
+
+    .. code-block:: python
+
+        >>> expand('users/*/messages/*', 'me', '123')
+        users/me/messages/123
+        >>> expand('/v1/{name=shelves/*/books/*}', name='shelves/1/books/3')
+        /v1/shelves/1/books/3
+
+ Args:
+ tmpl (str): The path template.
+ args: The positional variables for the path.
+ kwargs: The named variables for the path.
+
+ Returns:
+ str: The expanded path
+
+ Raises:
+ ValueError: If a positional or named variable is required by the
+ template but not specified or if an unexpected template expression
+ is encountered.
+ """
+ replacer = functools.partial(_expand_variable_match, list(args), kwargs)
+ return _VARIABLE_RE.sub(replacer, tmpl)
+
+
+def _replace_variable_with_pattern(match):
+ """Replace a variable match with a pattern that can be used to validate it.
+
+ Args:
+ match (re.Match): A regular expression match
+
+ Returns:
+ str: A regular expression pattern that can be used to validate the
+ variable in an expanded path.
+
+ Raises:
+ ValueError: If an unexpected template expression is encountered.
+ """
+ positional = match.group("positional")
+ name = match.group("name")
+ template = match.group("template")
+ if name is not None:
+ if not template:
+ return _SINGLE_SEGMENT_PATTERN.format(name)
+ elif template == "**":
+ return _MULTI_SEGMENT_PATTERN.format(name)
+ else:
+ return _generate_pattern_for_template(template)
+ elif positional == "*":
+ return _SINGLE_SEGMENT_PATTERN
+ elif positional == "**":
+ return _MULTI_SEGMENT_PATTERN
+ else:
+ raise ValueError("Unknown template expression {}".format(match.group(0)))
+
+
+def _generate_pattern_for_template(tmpl):
+ """Generate a pattern that can validate a path template.
+
+ Args:
+ tmpl (str): The path template
+
+ Returns:
+ str: A regular expression pattern that can be used to validate an
+ expanded path template.
+ """
+ return _VARIABLE_RE.sub(_replace_variable_with_pattern, tmpl)
+
+
+def get_field(request, field):
+ """Get the value of a field from a given dictionary.
+
+ Args:
+ request (dict | Message): A dictionary or a Message object.
+ field (str): The key to the request in dot notation.
+
+ Returns:
+ The value of the field.
+ """
+ parts = field.split(".")
+ value = request
+
+ for part in parts:
+ if not isinstance(value, dict):
+ value = getattr(value, part, None)
+ else:
+ value = value.get(part)
+ if isinstance(value, dict):
+ return
+ return value
+
+
+def delete_field(request, field):
+ """Delete the value of a field from a given dictionary.
+
+ Args:
+ request (dict | Message): A dictionary object or a Message.
+ field (str): The key to the request in dot notation.
+ """
+ parts = deque(field.split("."))
+ while len(parts) > 1:
+ part = parts.popleft()
+ if not isinstance(request, dict):
+ if hasattr(request, part):
+ request = getattr(request, part, None)
+ else:
+ return
+ else:
+ request = request.get(part)
+ part = parts.popleft()
+ if not isinstance(request, dict):
+ if hasattr(request, part):
+ request.ClearField(part)
+ else:
+ return
+ else:
+ request.pop(part, None)
+
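+# Behavior sketch (illustrative): ``get_field`` walks a dotted key through
+# nested dicts (or messages), and ``delete_field`` removes the leaf it names.
+#
+#   request = {"book": {"name": "shelves/1/books/2", "title": "Intro"}}
+#   get_field(request, "book.name")     # -> "shelves/1/books/2"
+#   delete_field(request, "book.name")  # request == {"book": {"title": "Intro"}}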
+
+def validate(tmpl, path):
+    """Validate a path against the path template.
+
+    .. code-block:: python
+
+        >>> validate('users/*/messages/*', 'users/me/messages/123')
+        True
+        >>> validate('users/*/messages/*', 'users/me/drafts/123')
+        False
+        >>> validate('/v1/{name=shelves/*/books/*}', '/v1/shelves/1/books/3')
+        True
+        >>> validate('/v1/{name=shelves/*/books/*}', '/v1/shelves/1/tapes/3')
+        False
+
+ Args:
+ tmpl (str): The path template.
+ path (str): The expanded path.
+
+ Returns:
+ bool: True if the path matches.
+ """
+ pattern = _generate_pattern_for_template(tmpl) + "$"
+ return True if re.match(pattern, path) is not None else False
+
+
+def transcode(http_options, message=None, **request_kwargs):
+ """Transcodes a grpc request pattern into a proper HTTP request following the rules outlined here,
+ https://github.com/googleapis/googleapis/blob/master/google/api/http.proto#L44-L312
+
+ Args:
+ http_options (list(dict)): A list of dicts which consist of these keys,
+ 'method' (str): The http method
+ 'uri' (str): The path template
+ 'body' (str): The body field name (optional)
+ (This is a simplified representation of the proto option `google.api.http`)
+
+ message (Message) : A request object (optional)
+ request_kwargs (dict) : A dict representing the request object
+
+ Returns:
+ dict: The transcoded request with these keys,
+ 'method' (str) : The http method
+ 'uri' (str) : The expanded uri
+ 'body' (dict | Message) : A dict or a Message representing the body (optional)
+ 'query_params' (dict | Message) : A dict or Message mapping query parameter variables and values
+
+ Raises:
+ ValueError: If the request does not match the given template.
+ """
+ transcoded_value = message or request_kwargs
+ bindings = []
+ for http_option in http_options:
+ request = {}
+
+ # Assign path
+ uri_template = http_option["uri"]
+ fields = [
+ (m.group("name"), m.group("template"))
+ for m in _VARIABLE_RE.finditer(uri_template)
+ ]
+ bindings.append((uri_template, fields))
+
+ path_args = {field: get_field(transcoded_value, field) for field, _ in fields}
+ request["uri"] = expand(uri_template, **path_args)
+
+ if not validate(uri_template, request["uri"]) or not all(path_args.values()):
+ continue
+
+ # Remove fields used in uri path from request
+ leftovers = copy.deepcopy(transcoded_value)
+ for path_field, _ in fields:
+ delete_field(leftovers, path_field)
+
+ # Assign body and query params
+ body = http_option.get("body")
+
+ if body:
+ if body == "*":
+ request["body"] = leftovers
+ if message:
+ request["query_params"] = message.__class__()
+ else:
+ request["query_params"] = {}
+ else:
+ try:
+ if message:
+ request["body"] = getattr(leftovers, body)
+ delete_field(leftovers, body)
+ else:
+ request["body"] = leftovers.pop(body)
+ except (KeyError, AttributeError):
+ continue
+ request["query_params"] = leftovers
+ else:
+ request["query_params"] = leftovers
+ request["method"] = http_option["method"]
+ return request
+
+ bindings_description = [
+ '\n\tURI: "{}"'
+ "\n\tRequired request fields:\n\t\t{}".format(
+ uri,
+ "\n\t\t".join(
+ [
+ 'field: "{}", pattern: "{}"'.format(n, p if p else "*")
+ for n, p in fields
+ ]
+ ),
+ )
+ for uri, fields in bindings
+ ]
+
+ raise ValueError(
+ "Invalid request."
+ "\nSome of the fields of the request message are either not initialized or "
+ "initialized with an invalid value."
+ "\nPlease make sure your request matches at least one accepted HTTP binding."
+ "\nTo match a binding the request message must have all the required fields "
+ "initialized with values matching their patterns as listed below:{}".format(
+ "\n".join(bindings_description)
+ )
+ )
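+
+
+# Usage sketch (illustrative): transcoding a request dict against a single
+# hypothetical binding.
+#
+#   options = [{"method": "get", "uri": "/v1/{name=shelves/*/books/*}"}]
+#   transcode(options, name="shelves/1/books/2", filter="x")
+#   # -> {"uri": "/v1/shelves/1/books/2", "method": "get",
+#   #     "query_params": {"filter": "x"}}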
diff --git a/Lib/site-packages/google/api_core/protobuf_helpers.py b/Lib/site-packages/google/api_core/protobuf_helpers.py
new file mode 100644
index 0000000..d777c5f
--- /dev/null
+++ b/Lib/site-packages/google/api_core/protobuf_helpers.py
@@ -0,0 +1,373 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for :mod:`protobuf`."""
+
+import collections
+import collections.abc
+import copy
+import inspect
+
+from google.protobuf import field_mask_pb2
+from google.protobuf import message
+from google.protobuf import wrappers_pb2
+
+
+_SENTINEL = object()
+_WRAPPER_TYPES = (
+ wrappers_pb2.BoolValue,
+ wrappers_pb2.BytesValue,
+ wrappers_pb2.DoubleValue,
+ wrappers_pb2.FloatValue,
+ wrappers_pb2.Int32Value,
+ wrappers_pb2.Int64Value,
+ wrappers_pb2.StringValue,
+ wrappers_pb2.UInt32Value,
+ wrappers_pb2.UInt64Value,
+)
+
+
+def from_any_pb(pb_type, any_pb):
+ """Converts an ``Any`` protobuf to the specified message type.
+
+ Args:
+ pb_type (type): the type of the message that any_pb stores an instance
+ of.
+ any_pb (google.protobuf.any_pb2.Any): the object to be converted.
+
+ Returns:
+ pb_type: An instance of the pb_type message.
+
+ Raises:
+ TypeError: if the message could not be converted.
+ """
+ msg = pb_type()
+
+ # Unwrap proto-plus wrapped messages.
+ if callable(getattr(pb_type, "pb", None)):
+ msg_pb = pb_type.pb(msg)
+ else:
+ msg_pb = msg
+
+ # Unpack the Any object and populate the protobuf message instance.
+ if not any_pb.Unpack(msg_pb):
+ raise TypeError(
+ "Could not convert {} to {}".format(
+ any_pb.__class__.__name__, pb_type.__name__
+ )
+ )
+
+ # Done; return the message.
+ return msg
+
+
+def check_oneof(**kwargs):
+ """Raise ValueError if more than one keyword argument is not ``None``.
+
+ Args:
+ kwargs (dict): The keyword arguments sent to the function.
+
+ Raises:
+ ValueError: If more than one entry in ``kwargs`` is not ``None``.
+ """
+ # Sanity check: If no keyword arguments were sent, this is fine.
+ if not kwargs:
+ return
+
+ not_nones = [val for val in kwargs.values() if val is not None]
+ if len(not_nones) > 1:
+ raise ValueError(
+ "Only one of {fields} should be set.".format(
+ fields=", ".join(sorted(kwargs.keys()))
+ )
+ )
+
+
+def get_messages(module):
+ """Discovers all protobuf Message classes in a given import module.
+
+ Args:
+ module (module): A Python module; :func:`dir` will be run against this
+ module to find Message subclasses.
+
+ Returns:
+ dict[str, google.protobuf.message.Message]: A dictionary with the
+ Message class names as keys, and the Message subclasses themselves
+ as values.
+ """
+ answer = collections.OrderedDict()
+ for name in dir(module):
+ candidate = getattr(module, name)
+ if inspect.isclass(candidate) and issubclass(candidate, message.Message):
+ answer[name] = candidate
+ return answer
+
+
+def _resolve_subkeys(key, separator="."):
+    """Resolve a potentially nested key.
+
+    If the key contains the ``separator`` (e.g. ``.``) then the key will be
+    split on the first instance of the subkey::
+
+        >>> _resolve_subkeys('a.b.c')
+        ('a', 'b.c')
+        >>> _resolve_subkeys('d|e|f', separator='|')
+        ('d', 'e|f')
+
+    If not, the subkey will be :data:`None`::
+
+        >>> _resolve_subkeys('foo')
+        ('foo', None)
+
+ Args:
+ key (str): A string that may or may not contain the separator.
+ separator (str): The namespace separator. Defaults to `.`.
+
+ Returns:
+ Tuple[str, str]: The key and subkey(s).
+ """
+ parts = key.split(separator, 1)
+
+ if len(parts) > 1:
+ return parts
+ else:
+ return parts[0], None
+
+
+def get(msg_or_dict, key, default=_SENTINEL):
+ """Retrieve a key's value from a protobuf Message or dictionary.
+
+ Args:
+        msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
+ object.
+ key (str): The key to retrieve from the object.
+ default (Any): If the key is not present on the object, and a default
+ is set, returns that default instead. A type-appropriate falsy
+ default is generally recommended, as protobuf messages almost
+ always have default values for unset values and it is not always
+ possible to tell the difference between a falsy value and an
+ unset one. If no default is set then :class:`KeyError` will be
+ raised if the key is not present in the object.
+
+ Returns:
+ Any: The return value from the underlying Message or dict.
+
+ Raises:
+ KeyError: If the key is not found. Note that, for unset values,
+ messages and dictionaries may not have consistent behavior.
+ TypeError: If ``msg_or_dict`` is not a Message or Mapping.
+ """
+ # We may need to get a nested key. Resolve this.
+ key, subkey = _resolve_subkeys(key)
+
+ # Attempt to get the value from the two types of objects we know about.
+ # If we get something else, complain.
+ if isinstance(msg_or_dict, message.Message):
+ answer = getattr(msg_or_dict, key, default)
+ elif isinstance(msg_or_dict, collections.abc.Mapping):
+ answer = msg_or_dict.get(key, default)
+ else:
+ raise TypeError(
+ "get() expected a dict or protobuf message, got {!r}.".format(
+ type(msg_or_dict)
+ )
+ )
+
+ # If the object we got back is our sentinel, raise KeyError; this is
+ # a "not found" case.
+ if answer is _SENTINEL:
+ raise KeyError(key)
+
+ # If a subkey exists, call this method recursively against the answer.
+ if subkey is not None and answer is not default:
+ return get(answer, subkey, default=default)
+
+ return answer
+
+
+def _set_field_on_message(msg, key, value):
+ """Set helper for protobuf Messages."""
+ # Attempt to set the value on the types of objects we know how to deal
+ # with.
+ if isinstance(value, (collections.abc.MutableSequence, tuple)):
+ # Clear the existing repeated protobuf message of any elements
+ # currently inside it.
+ while getattr(msg, key):
+ getattr(msg, key).pop()
+
+ # Write our new elements to the repeated field.
+ for item in value:
+ if isinstance(item, collections.abc.Mapping):
+ getattr(msg, key).add(**item)
+ else:
+ # protobuf's RepeatedCompositeContainer doesn't support
+ # append.
+ getattr(msg, key).extend([item])
+ elif isinstance(value, collections.abc.Mapping):
+ # Assign the dictionary values to the protobuf message.
+ for item_key, item_value in value.items():
+ set(getattr(msg, key), item_key, item_value)
+ elif isinstance(value, message.Message):
+ getattr(msg, key).CopyFrom(value)
+ else:
+ setattr(msg, key, value)
+
+
+def set(msg_or_dict, key, value):
+ """Set a key's value on a protobuf Message or dictionary.
+
+ Args:
+ msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
+ object.
+ key (str): The key to set.
+ value (Any): The value to set.
+
+ Raises:
+ TypeError: If ``msg_or_dict`` is not a Message or dictionary.
+ """
+ # Sanity check: Is our target object valid?
+ if not isinstance(msg_or_dict, (collections.abc.MutableMapping, message.Message)):
+ raise TypeError(
+ "set() expected a dict or protobuf message, got {!r}.".format(
+ type(msg_or_dict)
+ )
+ )
+
+ # We may be setting a nested key. Resolve this.
+ basekey, subkey = _resolve_subkeys(key)
+
+ # If a subkey exists, then get that object and call this method
+ # recursively against it using the subkey.
+ if subkey is not None:
+ if isinstance(msg_or_dict, collections.abc.MutableMapping):
+ msg_or_dict.setdefault(basekey, {})
+ set(get(msg_or_dict, basekey), subkey, value)
+ return
+
+ if isinstance(msg_or_dict, collections.abc.MutableMapping):
+ msg_or_dict[key] = value
+ else:
+ _set_field_on_message(msg_or_dict, key, value)
+
+
+def setdefault(msg_or_dict, key, value):
+ """Set the key on a protobuf Message or dictionary to a given value if the
+ current value is falsy.
+
+ Because protobuf Messages do not distinguish between unset values and
+ falsy ones particularly well (by design), this method treats any falsy
+ value (e.g. 0, empty list) as a target to be overwritten, on both Messages
+ and dictionaries.
+
+ Args:
+ msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
+ object.
+ key (str): The key on the object in question.
+ value (Any): The value to set.
+
+ Raises:
+ TypeError: If ``msg_or_dict`` is not a Message or dictionary.
+ """
+ if not get(msg_or_dict, key, default=None):
+ set(msg_or_dict, key, value)
+
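+# Behavior sketch (illustrative): get/set/setdefault accept dotted sub-keys
+# and work uniformly on dicts and protobuf messages.
+#
+#   data = {}
+#   set(data, "book.name", "shelves/1/books/2")
+#   get(data, "book.name")                      # -> "shelves/1/books/2"
+#   setdefault(data, "book.title", "Untitled")  # set only because it was unset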
+
+def field_mask(original, modified):
+ """Create a field mask by comparing two messages.
+
+ Args:
+ original (~google.protobuf.message.Message): the original message.
+ If set to None, this field will be interpreted as an empty
+ message.
+ modified (~google.protobuf.message.Message): the modified message.
+ If set to None, this field will be interpreted as an empty
+ message.
+
+ Returns:
+ google.protobuf.field_mask_pb2.FieldMask: field mask that contains
+ the list of field names that have different values between the two
+ messages. If the messages are equivalent, then the field mask is empty.
+
+ Raises:
+ ValueError: If the ``original`` or ``modified`` are not the same type.
+ """
+ if original is None and modified is None:
+ return field_mask_pb2.FieldMask()
+
+ if original is None and modified is not None:
+ original = copy.deepcopy(modified)
+ original.Clear()
+
+ if modified is None and original is not None:
+ modified = copy.deepcopy(original)
+ modified.Clear()
+
+ if not isinstance(original, type(modified)):
+ raise ValueError(
+ "expected that both original and modified should be of the "
+ 'same type, received "{!r}" and "{!r}".'.format(
+ type(original), type(modified)
+ )
+ )
+
+ return field_mask_pb2.FieldMask(paths=_field_mask_helper(original, modified))
+
+
+def _field_mask_helper(original, modified, current=""):
+ answer = []
+
+ for name in original.DESCRIPTOR.fields_by_name:
+ field_path = _get_path(current, name)
+
+ original_val = getattr(original, name)
+ modified_val = getattr(modified, name)
+
+ if _is_message(original_val) or _is_message(modified_val):
+ if original_val != modified_val:
+ # Wrapper types do not need to include the .value part of the
+ # path.
+ if _is_wrapper(original_val) or _is_wrapper(modified_val):
+ answer.append(field_path)
+ elif not modified_val.ListFields():
+ answer.append(field_path)
+ else:
+ answer.extend(
+ _field_mask_helper(original_val, modified_val, field_path)
+ )
+ else:
+ if original_val != modified_val:
+ answer.append(field_path)
+
+ return answer
+
+
+def _get_path(current, name):
+ # gapic-generator-python appends underscores to field names
+ # that collide with python keywords.
+ # `_` is stripped away as it is not possible to
+ # natively define a field with a trailing underscore in protobuf.
+ # APIs will reject field masks if fields have trailing underscores.
+ # See https://github.com/googleapis/python-api-core/issues/227
+ name = name.rstrip("_")
+ if not current:
+ return name
+ return "%s.%s" % (current, name)
+
+
+def _is_message(value):
+ return isinstance(value, message.Message)
+
+
+def _is_wrapper(value):
+ return type(value) in _WRAPPER_TYPES
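+
+
+# Usage sketch (illustrative): building an update mask from two messages. The
+# ``Book`` message type is hypothetical; only fields whose values differ end
+# up in the mask.
+#
+#   original = Book(name="shelves/1/books/2", title="Old")
+#   modified = Book(name="shelves/1/books/2", title="New")
+#   field_mask(original, modified).paths  # -> ["title"]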
diff --git a/Lib/site-packages/google/api_core/py.typed b/Lib/site-packages/google/api_core/py.typed
new file mode 100644
index 0000000..7e1a4a6
--- /dev/null
+++ b/Lib/site-packages/google/api_core/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-api-core package uses inline types.
diff --git a/Lib/site-packages/google/api_core/rest_helpers.py b/Lib/site-packages/google/api_core/rest_helpers.py
new file mode 100644
index 0000000..a78822f
--- /dev/null
+++ b/Lib/site-packages/google/api_core/rest_helpers.py
@@ -0,0 +1,109 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for rest transports."""
+
+import functools
+import operator
+
+
+def flatten_query_params(obj, strict=False):
+ """Flatten a dict into a list of (name,value) tuples.
+
+ The result is suitable for setting query params on an http request.
+
+ .. code-block:: python
+
+ >>> obj = {'a':
+ ... {'b':
+ ... {'c': ['x', 'y', 'z']} },
+ ... 'd': 'uvw',
+ ... 'e': True, }
+ >>> flatten_query_params(obj, strict=True)
+ [('a.b.c', 'x'), ('a.b.c', 'y'), ('a.b.c', 'z'), ('d', 'uvw'), ('e', 'true')]
+
+ Note that, as described in
+ https://github.com/googleapis/googleapis/blob/48d9fb8c8e287c472af500221c6450ecd45d7d39/google/api/http.proto#L117,
+ repeated fields (i.e. list-valued fields) may only contain primitive types (not lists or dicts).
+ This is enforced in this function.
+
+ Args:
+ obj: a possibly nested dictionary (from json), or None
+ strict: a bool, defaulting to False, to enforce that all values in the
+ result tuples be strings and, if boolean, lower-cased.
+
+    Returns:
+      a list of tuples, with each tuple having a (possibly) multi-part name
+      and a scalar value.
+
+    Raises:
+      TypeError: If obj is not a dict or None.
+      ValueError: If obj contains a list of non-primitive values.
+ """
+
+ if obj is not None and not isinstance(obj, dict):
+ raise TypeError("flatten_query_params must be called with dict object")
+
+ return _flatten(obj, key_path=[], strict=strict)
+
+
+def _flatten(obj, key_path, strict=False):
+ if obj is None:
+ return []
+ if isinstance(obj, dict):
+ return _flatten_dict(obj, key_path=key_path, strict=strict)
+ if isinstance(obj, list):
+ return _flatten_list(obj, key_path=key_path, strict=strict)
+ return _flatten_value(obj, key_path=key_path, strict=strict)
+
+
+def _is_primitive_value(obj):
+ if obj is None:
+ return False
+
+ if isinstance(obj, (list, dict)):
+ raise ValueError("query params may not contain repeated dicts or lists")
+
+ return True
+
+
+def _flatten_value(obj, key_path, strict=False):
+ return [(".".join(key_path), _canonicalize(obj, strict=strict))]
+
+
+def _flatten_dict(obj, key_path, strict=False):
+ items = (
+ _flatten(value, key_path=key_path + [key], strict=strict)
+ for key, value in obj.items()
+ )
+ return functools.reduce(operator.concat, items, [])
+
+
+def _flatten_list(elems, key_path, strict=False):
+ # Only lists of scalar values are supported.
+ # The name (key_path) is repeated for each value.
+ items = (
+ _flatten_value(elem, key_path=key_path, strict=strict)
+ for elem in elems
+ if _is_primitive_value(elem)
+ )
+ return functools.reduce(operator.concat, items, [])
+
+
+def _canonicalize(obj, strict=False):
+ if strict:
+ value = str(obj)
+ if isinstance(obj, bool):
+ value = value.lower()
+ return value
+ return obj
diff --git a/Lib/site-packages/google/api_core/rest_streaming.py b/Lib/site-packages/google/api_core/rest_streaming.py
new file mode 100644
index 0000000..f91381c
--- /dev/null
+++ b/Lib/site-packages/google/api_core/rest_streaming.py
@@ -0,0 +1,113 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for server-side streaming in REST."""
+
+from collections import deque
+import string
+from typing import Deque
+
+import requests
+
+
+class ResponseIterator:
+ """Iterator over REST API responses.
+
+ Args:
+ response (requests.Response): An API response object.
+ response_message_cls (Callable[proto.Message]): A proto
+ class expected to be returned from an API.
+ """
+
+ def __init__(self, response: requests.Response, response_message_cls):
+ self._response = response
+ self._response_message_cls = response_message_cls
+ # Inner iterator over HTTP response's content.
+ self._response_itr = self._response.iter_content(decode_unicode=True)
+        # Contains JSON responses ready to be sent to the user.
+ self._ready_objs: Deque[str] = deque()
+ # Current JSON response being built.
+ self._obj = ""
+ # Keeps track of the nesting level within a JSON object.
+ self._level = 0
+        # Keeps track of whether the parser is currently inside
+        # a JSON string value.
+ self._in_string = False
+ # Whether an escape symbol "\" was encountered.
+ self._escape_next = False
+
+ def cancel(self):
+ """Cancel existing streaming operation."""
+ self._response.close()
+
+ def _process_chunk(self, chunk: str):
+ if self._level == 0:
+ if chunk[0] != "[":
+ raise ValueError(
+ "Can only parse array of JSON objects, instead got %s" % chunk
+ )
+ for char in chunk:
+ if char == "{":
+ if self._level == 1:
+ # Level 1 corresponds to the outermost JSON object
+ # (i.e. the one we care about).
+ self._obj = ""
+ if not self._in_string:
+ self._level += 1
+ self._obj += char
+ elif char == "}":
+ self._obj += char
+ if not self._in_string:
+ self._level -= 1
+ if not self._in_string and self._level == 1:
+ self._ready_objs.append(self._obj)
+ elif char == '"':
+                # Helps to deal with escaped quotes inside a string.
+ if not self._escape_next:
+ self._in_string = not self._in_string
+ self._obj += char
+ elif char in string.whitespace:
+ if self._in_string:
+ self._obj += char
+ elif char == "[":
+ if self._level == 0:
+ self._level += 1
+ else:
+ self._obj += char
+ elif char == "]":
+ if self._level == 1:
+ self._level -= 1
+ else:
+ self._obj += char
+ else:
+ self._obj += char
+ self._escape_next = not self._escape_next if char == "\\" else False
+
+ def __next__(self):
+ while not self._ready_objs:
+ try:
+ chunk = next(self._response_itr)
+ self._process_chunk(chunk)
+ except StopIteration as e:
+ if self._level > 0:
+ raise ValueError("Unfinished stream: %s" % self._obj)
+ raise e
+ return self._grab()
+
+ def _grab(self):
+        # Parse the oldest complete JSON object into the response message class.
+ return self._response_message_cls.from_json(self._ready_objs.popleft())
+
+ def __iter__(self):
+ return self
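+
+
+# Example (illustrative sketch): consuming a server-streaming REST method.
+# ``session``, ``url``, and the proto-plus ``EchoResponse`` class (providing
+# ``from_json``) are placeholders for real client objects.
+#
+#   response = session.get(url, stream=True)
+#   for message in ResponseIterator(response, EchoResponse):
+#       print(message)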
diff --git a/Lib/site-packages/google/api_core/retry/__init__.py b/Lib/site-packages/google/api_core/retry/__init__.py
new file mode 100644
index 0000000..1724fdb
--- /dev/null
+++ b/Lib/site-packages/google/api_core/retry/__init__.py
@@ -0,0 +1,52 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Retry implementation for Google API client libraries."""
+
+from .retry_base import exponential_sleep_generator
+from .retry_base import if_exception_type
+from .retry_base import if_transient_error
+from .retry_base import build_retry_error
+from .retry_base import RetryFailureReason
+from .retry_unary import Retry
+from .retry_unary import retry_target
+from .retry_unary_async import AsyncRetry
+from .retry_unary_async import retry_target as retry_target_async
+from .retry_streaming import StreamingRetry
+from .retry_streaming import retry_target_stream
+from .retry_streaming_async import AsyncStreamingRetry
+from .retry_streaming_async import retry_target_stream as retry_target_stream_async
+
+# The following imports are for backwards compatibility with https://github.com/googleapis/python-api-core/blob/4d7d2edee2c108d43deb151e6e0fdceb56b73275/google/api_core/retry.py
+#
+# TODO: Revert these imports on the next major version release (https://github.com/googleapis/python-api-core/issues/576)
+from google.api_core import datetime_helpers # noqa: F401
+from google.api_core import exceptions # noqa: F401
+from google.auth import exceptions as auth_exceptions # noqa: F401
+
+__all__ = (
+ "exponential_sleep_generator",
+ "if_exception_type",
+ "if_transient_error",
+ "build_retry_error",
+ "RetryFailureReason",
+ "Retry",
+ "AsyncRetry",
+ "StreamingRetry",
+ "AsyncStreamingRetry",
+ "retry_target",
+ "retry_target_async",
+ "retry_target_stream",
+ "retry_target_stream_async",
+)
diff --git a/Lib/site-packages/google/api_core/retry/retry_base.py b/Lib/site-packages/google/api_core/retry/retry_base.py
new file mode 100644
index 0000000..1606e0f
--- /dev/null
+++ b/Lib/site-packages/google/api_core/retry/retry_base.py
@@ -0,0 +1,361 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared classes and functions for retrying requests.
+
+:class:`_BaseRetry` is the base class for :class:`Retry`,
+:class:`AsyncRetry`, :class:`StreamingRetry`, and :class:`AsyncStreamingRetry`.
+"""
+
+from __future__ import annotations
+
+import logging
+import random
+import time
+
+from enum import Enum
+from typing import Any, Callable, Optional, TYPE_CHECKING
+
+import requests.exceptions
+
+from google.api_core import exceptions
+from google.auth import exceptions as auth_exceptions
+
+if TYPE_CHECKING:
+ import sys
+
+ if sys.version_info >= (3, 11):
+ from typing import Self
+ else:
+ from typing_extensions import Self
+
+_DEFAULT_INITIAL_DELAY = 1.0 # seconds
+_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
+_DEFAULT_DELAY_MULTIPLIER = 2.0
+_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
+
+_LOGGER = logging.getLogger("google.api_core.retry")
+
+
+def if_exception_type(
+ *exception_types: type[Exception],
+) -> Callable[[Exception], bool]:
+ """Creates a predicate to check if the exception is of a given type.
+
+ Args:
+ exception_types (Sequence[:func:`type`]): The exception types to check
+ for.
+
+ Returns:
+ Callable[Exception]: A predicate that returns True if the provided
+ exception is of the given type(s).
+ """
+
+ def if_exception_type_predicate(exception: Exception) -> bool:
+ """Bound predicate for checking an exception type."""
+ return isinstance(exception, exception_types)
+
+ return if_exception_type_predicate
+
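+# Example (illustrative sketch): a predicate that retries only on NotFound.
+#
+#   predicate = if_exception_type(exceptions.NotFound)
+#   predicate(exceptions.NotFound("missing"))   # True
+#   predicate(ValueError("unrelated"))          # False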
+
+# pylint: disable=invalid-name
+# Pylint sees this as a constant, but it is also an alias that should be
+# considered a function.
+if_transient_error = if_exception_type(
+ exceptions.InternalServerError,
+ exceptions.TooManyRequests,
+ exceptions.ServiceUnavailable,
+ requests.exceptions.ConnectionError,
+ requests.exceptions.ChunkedEncodingError,
+ auth_exceptions.TransportError,
+)
+"""A predicate that checks if an exception is a transient API error.
+
+The following server errors are considered transient:
+
+- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC
+ ``INTERNAL(13)`` and its subclasses.
+- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429
+- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503
+- :class:`requests.exceptions.ConnectionError`
+- :class:`requests.exceptions.ChunkedEncodingError` - The server declared
+ chunked encoding but sent an invalid chunk.
+- :class:`google.auth.exceptions.TransportError` - Used to indicate an
+ error occurred during an HTTP request.
+"""
+# pylint: enable=invalid-name
+
+
+def exponential_sleep_generator(
+ initial: float, maximum: float, multiplier: float = _DEFAULT_DELAY_MULTIPLIER
+):
+ """Generates sleep intervals based on the exponential back-off algorithm.
+
+ This implements the `Truncated Exponential Back-off`_ algorithm.
+
+ .. _Truncated Exponential Back-off:
+ https://cloud.google.com/storage/docs/exponential-backoff
+
+ Args:
+ initial (float): The minimum amount of time to delay. This must
+ be greater than 0.
+ maximum (float): The maximum amount of time to delay.
+ multiplier (float): The multiplier applied to the delay.
+
+ Yields:
+ float: successive sleep intervals.
+ """
+ max_delay = min(initial, maximum)
+ while True:
+ yield random.uniform(0.0, max_delay)
+ max_delay = min(max_delay * multiplier, maximum)
+
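+# Example (illustrative sketch): each yielded delay is drawn uniformly from
+# [0, min(initial * multiplier**n, maximum)], so the upper bound grows
+# exponentially until truncated at ``maximum``.
+#
+#   delays = exponential_sleep_generator(initial=1.0, maximum=10.0)
+#   first_four = [next(delays) for _ in range(4)]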
+
+class RetryFailureReason(Enum):
+ """
+ The cause of a failed retry, used when building exceptions
+ """
+
+ TIMEOUT = 0
+ NON_RETRYABLE_ERROR = 1
+
+
+def build_retry_error(
+ exc_list: list[Exception],
+ reason: RetryFailureReason,
+ timeout_val: float | None,
+ **kwargs: Any,
+) -> tuple[Exception, Exception | None]:
+ """
+ Default exception_factory implementation.
+
+ Returns a RetryError if the failure is due to a timeout, otherwise
+ returns the last exception encountered.
+
+ Args:
+ - exc_list: list of exceptions that occurred during the retry
+ - reason: reason for the retry failure.
+ Can be TIMEOUT or NON_RETRYABLE_ERROR
+ - timeout_val: the original timeout value for the retry (in seconds), for use in the exception message
+
+ Returns:
+ - tuple: a tuple of the exception to be raised, and the cause exception if any
+ """
+ if reason == RetryFailureReason.TIMEOUT:
+ # return RetryError with the most recent exception as the cause
+ src_exc = exc_list[-1] if exc_list else None
+ timeout_val_str = f"of {timeout_val:0.1f}s " if timeout_val is not None else ""
+ return (
+ exceptions.RetryError(
+ f"Timeout {timeout_val_str}exceeded",
+ src_exc,
+ ),
+ src_exc,
+ )
+ elif exc_list:
+ # return most recent exception encountered
+ return exc_list[-1], None
+ else:
+ # no exceptions were given in exc_list. Raise generic RetryError
+ return exceptions.RetryError("Unknown error", None), None
+
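+# Example (illustrative sketch): on timeout, the most recent exception becomes
+# the cause of the returned RetryError.
+#
+#   exc, cause = build_retry_error(
+#       [ValueError("boom")], RetryFailureReason.TIMEOUT, 60.0
+#   )
+#   # exc.message == "Timeout of 60.0s exceeded"; cause is the ValueError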
+
+def _retry_error_helper(
+ exc: Exception,
+ deadline: float | None,
+ next_sleep: float,
+ error_list: list[Exception],
+ predicate_fn: Callable[[Exception], bool],
+ on_error_fn: Callable[[Exception], None] | None,
+ exc_factory_fn: Callable[
+ [list[Exception], RetryFailureReason, float | None],
+ tuple[Exception, Exception | None],
+ ],
+ original_timeout: float | None,
+):
+ """
+ Shared logic for handling an error for all retry implementations
+
+ - Raises an error on timeout or non-retryable error
+ - Calls on_error_fn if provided
+ - Logs the error
+
+ Args:
+ - exc: the exception that was raised
+ - deadline: the deadline for the retry, calculated as a diff from time.monotonic()
+ - next_sleep: the next sleep interval
+ - error_list: the list of exceptions that have been raised so far
+ - predicate_fn: takes `exc` and returns true if the operation should be retried
+ - on_error_fn: callback to execute when a retryable error occurs
+ - exc_factory_fn: callback used to build the exception to be raised on terminal failure
+      - original_timeout: the original timeout value for the retry (in seconds),
+ to be passed to the exception factory for building an error message
+ """
+ error_list.append(exc)
+ if not predicate_fn(exc):
+ final_exc, source_exc = exc_factory_fn(
+ error_list,
+ RetryFailureReason.NON_RETRYABLE_ERROR,
+ original_timeout,
+ )
+ raise final_exc from source_exc
+ if on_error_fn is not None:
+ on_error_fn(exc)
+ if deadline is not None and time.monotonic() + next_sleep > deadline:
+ final_exc, source_exc = exc_factory_fn(
+ error_list,
+ RetryFailureReason.TIMEOUT,
+ original_timeout,
+ )
+ raise final_exc from source_exc
+ _LOGGER.debug(
+ "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], next_sleep)
+ )
+
+
+class _BaseRetry(object):
+ """
+ Base class for retry configuration objects. This class is intended to capture retry
+ and backoff configuration that is common to both synchronous and asynchronous retries,
+ for both unary and streaming RPCs. It is not intended to be instantiated directly,
+ but rather to be subclassed by the various retry configuration classes.
+ """
+
+ def __init__(
+ self,
+ predicate: Callable[[Exception], bool] = if_transient_error,
+ initial: float = _DEFAULT_INITIAL_DELAY,
+ maximum: float = _DEFAULT_MAXIMUM_DELAY,
+ multiplier: float = _DEFAULT_DELAY_MULTIPLIER,
+ timeout: Optional[float] = _DEFAULT_DEADLINE,
+ on_error: Optional[Callable[[Exception], Any]] = None,
+ **kwargs: Any,
+ ) -> None:
+ self._predicate = predicate
+ self._initial = initial
+ self._multiplier = multiplier
+ self._maximum = maximum
+ self._timeout = kwargs.get("deadline", timeout)
+ self._deadline = self._timeout
+ self._on_error = on_error
+
+ def __call__(self, *args, **kwargs) -> Any:
+ raise NotImplementedError("Not implemented in base class")
+
+ @property
+ def deadline(self) -> float | None:
+ """
+ DEPRECATED: use ``timeout`` instead. Refer to the ``Retry`` class
+ documentation for details.
+ """
+ return self._timeout
+
+ @property
+ def timeout(self) -> float | None:
+ return self._timeout
+
+ def with_deadline(self, deadline: float | None) -> Self:
+ """Return a copy of this retry with the given timeout.
+
+ DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class
+ documentation for details.
+
+ Args:
+ deadline (float|None): How long to keep retrying, in seconds. If None,
+ no timeout is enforced.
+
+ Returns:
+ Retry: A new retry instance with the given timeout.
+ """
+ return self.with_timeout(deadline)
+
+ def with_timeout(self, timeout: float | None) -> Self:
+ """Return a copy of this retry with the given timeout.
+
+ Args:
+ timeout (float): How long to keep retrying, in seconds. If None,
+ no timeout will be enforced.
+
+ Returns:
+ Retry: A new retry instance with the given timeout.
+ """
+ return type(self)(
+ predicate=self._predicate,
+ initial=self._initial,
+ maximum=self._maximum,
+ multiplier=self._multiplier,
+ timeout=timeout,
+ on_error=self._on_error,
+ )
+
+ def with_predicate(self, predicate: Callable[[Exception], bool]) -> Self:
+ """Return a copy of this retry with the given predicate.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return
+ ``True`` if the given exception is retryable.
+
+ Returns:
+ Retry: A new retry instance with the given predicate.
+ """
+ return type(self)(
+ predicate=predicate,
+ initial=self._initial,
+ maximum=self._maximum,
+ multiplier=self._multiplier,
+ timeout=self._timeout,
+ on_error=self._on_error,
+ )
+
+ def with_delay(
+ self,
+ initial: Optional[float] = None,
+ maximum: Optional[float] = None,
+ multiplier: Optional[float] = None,
+ ) -> Self:
+ """Return a copy of this retry with the given delay options.
+
+ Args:
+ initial (float): The minimum amount of time to delay (in seconds). This must
+ be greater than 0. If None, the current value is used.
+ maximum (float): The maximum amount of time to delay (in seconds). If None, the
+ current value is used.
+ multiplier (float): The multiplier applied to the delay. If None, the current
+ value is used.
+
+ Returns:
+ Retry: A new retry instance with the given delay options.
+ """
+ return type(self)(
+ predicate=self._predicate,
+ initial=initial if initial is not None else self._initial,
+ maximum=maximum if maximum is not None else self._maximum,
+ multiplier=multiplier if multiplier is not None else self._multiplier,
+ timeout=self._timeout,
+ on_error=self._on_error,
+ )
+
+ def __str__(self) -> str:
+ return (
+ "<{} predicate={}, initial={:.1f}, maximum={:.1f}, "
+ "multiplier={:.1f}, timeout={}, on_error={}>".format(
+ type(self).__name__,
+ self._predicate,
+ self._initial,
+ self._maximum,
+ self._multiplier,
+ self._timeout, # timeout can be None, thus no {:.1f}
+ self._on_error,
+ )
+ )
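+
+
+# Example (illustrative sketch): the ``with_*`` methods return modified copies,
+# so configurations can be derived by chaining. ``Retry`` refers to the
+# subclass defined in ``retry_unary``.
+#
+#   base = Retry(timeout=120.0)
+#   tweaked = base.with_timeout(60.0).with_delay(initial=0.5, maximum=5.0)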
diff --git a/Lib/site-packages/google/api_core/retry/retry_streaming.py b/Lib/site-packages/google/api_core/retry/retry_streaming.py
new file mode 100644
index 0000000..e113323
--- /dev/null
+++ b/Lib/site-packages/google/api_core/retry/retry_streaming.py
@@ -0,0 +1,263 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generator wrapper for retryable streaming RPCs.
+"""
+from __future__ import annotations
+
+from typing import (
+ Callable,
+ Optional,
+ List,
+ Tuple,
+ Iterable,
+ Generator,
+ TypeVar,
+ Any,
+ TYPE_CHECKING,
+)
+
+import sys
+import time
+import functools
+
+from google.api_core.retry.retry_base import _BaseRetry
+from google.api_core.retry.retry_base import _retry_error_helper
+from google.api_core.retry import exponential_sleep_generator
+from google.api_core.retry import build_retry_error
+from google.api_core.retry import RetryFailureReason
+
+if TYPE_CHECKING:
+ if sys.version_info >= (3, 10):
+ from typing import ParamSpec
+ else:
+ from typing_extensions import ParamSpec
+
+ _P = ParamSpec("_P") # target function call parameters
+ _Y = TypeVar("_Y") # yielded values
+
+
+def retry_target_stream(
+ target: Callable[_P, Iterable[_Y]],
+ predicate: Callable[[Exception], bool],
+ sleep_generator: Iterable[float],
+ timeout: Optional[float] = None,
+ on_error: Optional[Callable[[Exception], None]] = None,
+ exception_factory: Callable[
+ [List[Exception], RetryFailureReason, Optional[float]],
+ Tuple[Exception, Optional[Exception]],
+ ] = build_retry_error,
+ init_args: _P.args = (),
+ init_kwargs: _P.kwargs = {},
+ **kwargs,
+) -> Generator[_Y, Any, None]:
+ """Create a generator wrapper that retries the wrapped stream if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`Retry`.
+
+ Args:
+ target: The generator function to call and retry.
+ predicate: A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator: An infinite iterator that determines
+ how long to sleep between retries.
+ timeout: How long to keep retrying the target.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error: If given, the on_error callback will be called with each
+ retryable exception raised by the target. Any error raised by this
+ function will *not* be caught.
+ exception_factory: A function that is called when the retryable reaches
+ a terminal failure state, used to construct an exception to be raised.
+ It takes a list of all exceptions encountered, a retry.RetryFailureReason
+ enum indicating the failure cause, and the original timeout value
+ as arguments. It should return a tuple of the exception to be raised,
+ along with the cause exception if any. The default implementation will raise
+ a RetryError on timeout, or the last exception encountered otherwise.
+ init_args: Positional arguments to pass to the target function.
+ init_kwargs: Keyword arguments to pass to the target function.
+
+ Returns:
+ Generator: A retryable generator that wraps the target generator function.
+
+ Raises:
+ ValueError: If the sleep generator stops yielding values.
+ Exception: a custom exception specified by the exception_factory if provided.
+ If no exception_factory is provided:
+ google.api_core.RetryError: If the timeout is exceeded while retrying.
+ Exception: If the target raises an error that isn't retryable.
+ """
+
+ timeout = kwargs.get("deadline", timeout)
+ deadline: Optional[float] = (
+ time.monotonic() + timeout if timeout is not None else None
+ )
+ error_list: list[Exception] = []
+
+ for sleep in sleep_generator:
+ # Start a new retry loop
+ try:
+ # Note: in the future, we can add a ResumptionStrategy object
+ # to generate new args between calls. For now, use the same args
+ # for each attempt.
+ subgenerator = target(*init_args, **init_kwargs)
+ return (yield from subgenerator)
+ # handle exceptions raised by the subgenerator
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ # defer to shared logic for handling errors
+ _retry_error_helper(
+ exc,
+ deadline,
+ sleep,
+ error_list,
+ predicate,
+ on_error,
+ exception_factory,
+ timeout,
+ )
+ # if exception not raised, sleep before next attempt
+ time.sleep(sleep)
+
+ raise ValueError("Sleep generator stopped yielding sleep values.")
+
+
+class StreamingRetry(_BaseRetry):
+ """Exponential retry decorator for streaming synchronous RPCs.
+
+ This class returns a Generator when called, which wraps the target
+ stream in retry logic. If any exception is raised by the target, the
+ entire stream will be retried within the wrapper.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ Important Note: when a stream encounters a retryable error, it will
+ silently construct a fresh iterator instance in the background
+ and continue yielding (likely duplicate) values as if no error occurred.
+ This is the most general way to retry a stream, but it often is not the
+ desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...]
+
+ There are two ways to build more advanced retry logic for streams:
+
+ 1. Wrap the target
+ Use a ``target`` that maintains state between retries, and creates a
+ different generator on each retry call. For example, you can wrap a
+ network call in a function that modifies the request based on what has
+ already been returned:
+
+ .. code-block:: python
+
+ def attempt_with_modified_request(target, request, seen_items=[]):
+ # remove seen items from request on each attempt
+ new_request = modify_request(request, seen_items)
+ new_generator = target(new_request)
+ for item in new_generator:
+ yield item
+ seen_items.append(item)
+
+ retry_wrapped_fn = StreamingRetry()(attempt_with_modified_request)
+ retryable_generator = retry_wrapped_fn(target, request)
+
+ 2. Wrap the retry generator
+ Alternatively, you can wrap the retryable generator itself before
+ passing it to the end-user to add a filter on the stream. For
+ example, you can keep track of the items that were successfully yielded
+ in previous retry attempts, and only yield new items when the
+ new attempt surpasses the previous ones:
+
+ .. code-block:: python
+
+ def retryable_with_filter(target):
+ stream_idx = 0
+ # reset stream_idx when the stream is retried
+ def on_error(e):
+ nonlocal stream_idx
+ stream_idx = 0
+ # build retryable
+ retryable_gen = StreamingRetry(...)(target)
+ # keep track of what has been yielded out of filter
+ seen_items = []
+ for item in retryable_gen():
+ if stream_idx >= len(seen_items):
+ seen_items.append(item)
+ yield item
+ elif item != seen_items[stream_idx]:
+ raise ValueError("Stream differs from last attempt")
+ stream_idx += 1
+
+ filter_retry_wrapped = retryable_with_filter(target)
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+ initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+ maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ timeout (float): How long to keep retrying, in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Callable[Exception]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+        deadline (float): DEPRECATED: use ``timeout`` instead. For backward
+ compatibility, if specified it will override the ``timeout`` parameter.
+ """
+
+ def __call__(
+ self,
+ func: Callable[_P, Iterable[_Y]],
+ on_error: Callable[[Exception], Any] | None = None,
+ ) -> Callable[_P, Generator[_Y, Any, None]]:
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable to add retry behavior to.
+ on_error (Optional[Callable[Exception]]): If given, the
+ on_error callback will be called with each retryable exception
+ raised by the wrapped function. Any error raised by this
+ function will *not* be caught. If on_error was specified in the
+ constructor, this value will be ignored.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ def retry_wrapped_func(
+ *args: _P.args, **kwargs: _P.kwargs
+ ) -> Generator[_Y, Any, None]:
+ """A wrapper that calls target function with retry."""
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return retry_target_stream(
+ func,
+ predicate=self._predicate,
+ sleep_generator=sleep_generator,
+ timeout=self._timeout,
+ on_error=on_error,
+ init_args=args,
+ init_kwargs=kwargs,
+ )
+
+ return retry_wrapped_func
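+
+
+# Example (illustrative sketch): retrying a flaky generator. On a retryable
+# error the stream restarts from the beginning, so values may repeat.
+#
+#   from google.api_core.retry import if_exception_type
+#
+#   @StreamingRetry(predicate=if_exception_type(ValueError), timeout=10.0)
+#   def numbers():
+#       yield from (1, 2, 3)
+#
+#   assert list(numbers()) == [1, 2, 3]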
diff --git a/Lib/site-packages/google/api_core/retry/retry_streaming_async.py b/Lib/site-packages/google/api_core/retry/retry_streaming_async.py
new file mode 100644
index 0000000..2924ba1
--- /dev/null
+++ b/Lib/site-packages/google/api_core/retry/retry_streaming_async.py
@@ -0,0 +1,325 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generator wrapper for retryable async streaming RPCs.
+"""
+from __future__ import annotations
+
+from typing import (
+ cast,
+ Any,
+ Callable,
+ Iterable,
+ AsyncIterator,
+ AsyncIterable,
+ Awaitable,
+ TypeVar,
+ AsyncGenerator,
+ TYPE_CHECKING,
+)
+
+import asyncio
+import time
+import sys
+import functools
+
+from google.api_core.retry.retry_base import _BaseRetry
+from google.api_core.retry.retry_base import _retry_error_helper
+from google.api_core.retry import exponential_sleep_generator
+from google.api_core.retry import build_retry_error
+from google.api_core.retry import RetryFailureReason
+
+
+if TYPE_CHECKING:
+ if sys.version_info >= (3, 10):
+ from typing import ParamSpec
+ else:
+ from typing_extensions import ParamSpec
+
+ _P = ParamSpec("_P") # target function call parameters
+ _Y = TypeVar("_Y") # yielded values
+
+
+async def retry_target_stream(
+ target: Callable[_P, AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]],
+ predicate: Callable[[Exception], bool],
+ sleep_generator: Iterable[float],
+ timeout: float | None = None,
+ on_error: Callable[[Exception], None] | None = None,
+ exception_factory: Callable[
+ [list[Exception], RetryFailureReason, float | None],
+ tuple[Exception, Exception | None],
+ ] = build_retry_error,
+ init_args: _P.args = (),
+ init_kwargs: _P.kwargs = {},
+ **kwargs,
+) -> AsyncGenerator[_Y, None]:
+ """Create a generator wrapper that retries the wrapped stream if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`AsyncRetry`.
+
+ Args:
+ target: The generator function to call and retry.
+ predicate: A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator: An infinite iterator that determines
+ how long to sleep between retries.
+ timeout: How long to keep retrying the target.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error: If given, the on_error callback will be called with each
+ retryable exception raised by the target. Any error raised by this
+ function will *not* be caught.
+ exception_factory: A function that is called when the retryable reaches
+ a terminal failure state, used to construct an exception to be raised.
+ It takes a list of all exceptions encountered, a retry.RetryFailureReason
+ enum indicating the failure cause, and the original timeout value
+ as arguments. It should return a tuple of the exception to be raised,
+ along with the cause exception if any. The default implementation will raise
+ a RetryError on timeout, or the last exception encountered otherwise.
+ init_args: Positional arguments to pass to the target function.
+ init_kwargs: Keyword arguments to pass to the target function.
+
+ Returns:
+ AsyncGenerator: A retryable generator that wraps the target generator function.
+
+ Raises:
+ ValueError: If the sleep generator stops yielding values.
+ Exception: a custom exception specified by the exception_factory if provided.
+ If no exception_factory is provided:
+ google.api_core.RetryError: If the timeout is exceeded while retrying.
+ Exception: If the target raises an error that isn't retryable.
+ """
+ target_iterator: AsyncIterator[_Y] | None = None
+ timeout = kwargs.get("deadline", timeout)
+    deadline = time.monotonic() + timeout if timeout is not None else None
+ # keep track of retryable exceptions we encounter to pass in to exception_factory
+ error_list: list[Exception] = []
+ target_is_generator: bool | None = None
+
+ for sleep in sleep_generator:
+ # Start a new retry loop
+ try:
+ # Note: in the future, we can add a ResumptionStrategy object
+ # to generate new args between calls. For now, use the same args
+ # for each attempt.
+ target_output: AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]] = target(
+ *init_args, **init_kwargs
+ )
+ try:
+ # gapic functions return the generator behind an awaitable
+ # unwrap the awaitable so we can work with the generator directly
+ target_output = await target_output # type: ignore
+ except TypeError:
+ # was not awaitable, continue
+ pass
+ target_iterator = cast(AsyncIterable["_Y"], target_output).__aiter__()
+
+ if target_is_generator is None:
+ # Check if target supports generator features (asend, athrow, aclose)
+ target_is_generator = bool(getattr(target_iterator, "asend", None))
+
+ sent_in = None
+ while True:
+ ## Read from target_iterator
+ # If the target is a generator, we will advance it with `asend`
+ # otherwise, we will use `anext`
+ if target_is_generator:
+ next_value = await target_iterator.asend(sent_in) # type: ignore
+ else:
+ next_value = await target_iterator.__anext__()
+ ## Yield from Wrapper to caller
+ try:
+ # yield latest value from target
+ # exceptions from `athrow` and `aclose` are injected here
+ sent_in = yield next_value
+ except GeneratorExit:
+ # if wrapper received `aclose` while waiting on yield,
+ # it will raise GeneratorExit here
+ if target_is_generator:
+ # pass to inner target_iterator for handling
+ await cast(AsyncGenerator["_Y", None], target_iterator).aclose()
+ else:
+ raise
+ return
+ except: # noqa: E722
+ # bare except catches any exception passed to `athrow`
+ if target_is_generator:
+ # delegate error handling to target_iterator
+ await cast(AsyncGenerator["_Y", None], target_iterator).athrow(
+ cast(BaseException, sys.exc_info()[1])
+ )
+ else:
+ raise
+ return
+ except StopAsyncIteration:
+ # if iterator exhausted, return
+ return
+ # handle exceptions raised by the target_iterator
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ # defer to shared logic for handling errors
+ _retry_error_helper(
+ exc,
+ deadline,
+ sleep,
+ error_list,
+ predicate,
+ on_error,
+ exception_factory,
+ timeout,
+ )
+ # if exception not raised, sleep before next attempt
+ await asyncio.sleep(sleep)
+ finally:
+ if target_is_generator and target_iterator is not None:
+ await cast(AsyncGenerator["_Y", None], target_iterator).aclose()
+ raise ValueError("Sleep generator stopped yielding sleep values.")
+
+
+class AsyncStreamingRetry(_BaseRetry):
+ """Exponential retry decorator for async streaming rpcs.
+
+ This class returns an AsyncGenerator when called, which wraps the target
+ stream in retry logic. If any exception is raised by the target, the
+ entire stream will be retried within the wrapper.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+    Important Note: when a stream encounters a retryable error, it will
+ silently construct a fresh iterator instance in the background
+ and continue yielding (likely duplicate) values as if no error occurred.
+ This is the most general way to retry a stream, but it often is not the
+ desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...]
+
+ There are two ways to build more advanced retry logic for streams:
+
+ 1. Wrap the target
+ Use a ``target`` that maintains state between retries, and creates a
+ different generator on each retry call. For example, you can wrap a
+ grpc call in a function that modifies the request based on what has
+ already been returned:
+
+ .. code-block:: python
+
+ async def attempt_with_modified_request(target, request, seen_items=[]):
+ # remove seen items from request on each attempt
+ new_request = modify_request(request, seen_items)
+ new_generator = await target(new_request)
+ async for item in new_generator:
+ yield item
+ seen_items.append(item)
+
+            retry_wrapped_fn = AsyncRetry(is_stream=True)(attempt_with_modified_request)
+            retryable_generator = await retry_wrapped_fn(target, request)
+
+ 2. Wrap the retry generator
+ Alternatively, you can wrap the retryable generator itself before
+ passing it to the end-user to add a filter on the stream. For
+ example, you can keep track of the items that were successfully yielded
+ in previous retry attempts, and only yield new items when the
+ new attempt surpasses the previous ones:
+
+ .. code-block:: python
+
+ async def retryable_with_filter(target):
+ stream_idx = 0
+ # reset stream_idx when the stream is retried
+ def on_error(e):
+ nonlocal stream_idx
+ stream_idx = 0
+ # build retryable
+ retryable_gen = AsyncRetry(is_stream=True, ...)(target)
+ # keep track of what has been yielded out of filter
+ seen_items = []
+                async for item in await retryable_gen():
+ if stream_idx >= len(seen_items):
+ yield item
+ seen_items.append(item)
+                elif item != seen_items[stream_idx]:
+                    raise ValueError("Stream differs from last attempt")
+ stream_idx += 1
+
+ filter_retry_wrapped = retryable_with_filter(target)
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+ initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+ maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ timeout (Optional[float]): How long to keep retrying in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Optional[Callable[Exception]]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+ is_stream (bool): Indicates whether the input function
+ should be treated as a stream function (i.e. an AsyncGenerator,
+ or function or coroutine that returns an AsyncIterable).
+ If True, the iterable will be wrapped with retry logic, and any
+ failed outputs will restart the stream. If False, only the input
+ function call itself will be retried. Defaults to False.
+ To avoid duplicate values, retryable streams should typically be
+ wrapped in additional filter logic before use.
+        deadline (float): DEPRECATED: use ``timeout`` instead. If set, it will
+            override the ``timeout`` parameter.
+ """
+
+ def __call__(
+ self,
+ func: Callable[..., AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]],
+ on_error: Callable[[Exception], Any] | None = None,
+ ) -> Callable[_P, Awaitable[AsyncGenerator[_Y, None]]]:
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable or stream to add retry behavior to.
+ on_error (Optional[Callable[Exception]]): If given, the
+ on_error callback will be called with each retryable exception
+ raised by the wrapped function. Any error raised by this
+ function will *not* be caught. If on_error was specified in the
+ constructor, this value will be ignored.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ async def retry_wrapped_func(
+ *args: _P.args, **kwargs: _P.kwargs
+ ) -> AsyncGenerator[_Y, None]:
+ """A wrapper that calls target function with retry."""
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return retry_target_stream(
+ func,
+ self._predicate,
+ sleep_generator,
+ self._timeout,
+ on_error,
+ init_args=args,
+ init_kwargs=kwargs,
+ )
+
+ return retry_wrapped_func
diff --git a/Lib/site-packages/google/api_core/retry/retry_unary.py b/Lib/site-packages/google/api_core/retry/retry_unary.py
new file mode 100644
index 0000000..ab1b403
--- /dev/null
+++ b/Lib/site-packages/google/api_core/retry/retry_unary.py
@@ -0,0 +1,301 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for retrying functions with exponential back-off.
+
+The :class:`Retry` decorator can be used to retry functions that raise
+exceptions using exponential backoff. Because an exponential sleep algorithm is
+used, the retry is limited by a `timeout`. The timeout determines the window
+in which retries will be attempted. A timeout is used instead of a total number
+of retries because it is difficult to bound how long a function can block when
+combining a retry count with exponential backoff.
+
+By default, this decorator will retry transient
+API errors (see :func:`if_transient_error`). For example:
+
+.. code-block:: python
+
+ @retry.Retry()
+ def call_flaky_rpc():
+ return client.flaky_rpc()
+
+ # Will retry flaky_rpc() if it raises transient API errors.
+ result = call_flaky_rpc()
+
+You can pass a custom predicate to retry on different exceptions, such as
+waiting for an eventually consistent item to be available:
+
+.. code-block:: python
+
+ @retry.Retry(predicate=if_exception_type(exceptions.NotFound))
+ def check_if_exists():
+ return client.does_thing_exist()
+
+ is_available = check_if_exists()
+
+Some client library methods apply retry automatically. These methods can accept
+a ``retry`` parameter that allows you to configure the behavior:
+
+.. code-block:: python
+
+ my_retry = retry.Retry(timeout=60)
+ result = client.some_method(retry=my_retry)
+
+"""
+
+from __future__ import annotations
+
+import functools
+import sys
+import time
+import inspect
+import warnings
+from typing import Any, Callable, Iterable, TypeVar, TYPE_CHECKING
+
+from google.api_core.retry.retry_base import _BaseRetry
+from google.api_core.retry.retry_base import _retry_error_helper
+from google.api_core.retry.retry_base import exponential_sleep_generator
+from google.api_core.retry.retry_base import build_retry_error
+from google.api_core.retry.retry_base import RetryFailureReason
+
+
+if TYPE_CHECKING:
+ if sys.version_info >= (3, 10):
+ from typing import ParamSpec
+ else:
+ from typing_extensions import ParamSpec
+
+ _P = ParamSpec("_P") # target function call parameters
+ _R = TypeVar("_R") # target function returned value
+
+_ASYNC_RETRY_WARNING = "Using the synchronous google.api_core.retry.Retry with asynchronous calls may lead to unexpected results. Please use google.api_core.retry_async.AsyncRetry instead."
+
+
+def retry_target(
+ target: Callable[_P, _R],
+ predicate: Callable[[Exception], bool],
+ sleep_generator: Iterable[float],
+ timeout: float | None = None,
+ on_error: Callable[[Exception], None] | None = None,
+ exception_factory: Callable[
+ [list[Exception], RetryFailureReason, float | None],
+ tuple[Exception, Exception | None],
+ ] = build_retry_error,
+ **kwargs,
+):
+ """Call a function and retry if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`Retry`.
+
+ Args:
+ target(Callable): The function to call and retry. This must be a
+ nullary function - apply arguments with `functools.partial`.
+ predicate (Callable[Exception]): A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator (Iterable[float]): An infinite iterator that determines
+ how long to sleep between retries.
+ timeout (Optional[float]): How long to keep retrying the target.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Optional[Callable[Exception]]): If given, the on_error
+ callback will be called with each retryable exception raised by the
+ target. Any error raised by this function will *not* be caught.
+ exception_factory: A function that is called when the retryable reaches
+ a terminal failure state, used to construct an exception to be raised.
+ It takes a list of all exceptions encountered, a retry.RetryFailureReason
+ enum indicating the failure cause, and the original timeout value
+ as arguments. It should return a tuple of the exception to be raised,
+ along with the cause exception if any. The default implementation will raise
+ a RetryError on timeout, or the last exception encountered otherwise.
+ deadline (float): DEPRECATED: use ``timeout`` instead. For backward
+ compatibility, if specified it will override ``timeout`` parameter.
+
+ Returns:
+ Any: the return value of the target function.
+
+ Raises:
+ ValueError: If the sleep generator stops yielding values.
+ Exception: a custom exception specified by the exception_factory if provided.
+ If no exception_factory is provided:
+ google.api_core.RetryError: If the timeout is exceeded while retrying.
+ Exception: If the target raises an error that isn't retryable.
+ """
+
+ timeout = kwargs.get("deadline", timeout)
+
+ deadline = time.monotonic() + timeout if timeout is not None else None
+ error_list: list[Exception] = []
+
+ for sleep in sleep_generator:
+ try:
+ result = target()
+ if inspect.isawaitable(result):
+ warnings.warn(_ASYNC_RETRY_WARNING)
+ return result
+
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ # defer to shared logic for handling errors
+ _retry_error_helper(
+ exc,
+ deadline,
+ sleep,
+ error_list,
+ predicate,
+ on_error,
+ exception_factory,
+ timeout,
+ )
+ # if exception not raised, sleep before next attempt
+ time.sleep(sleep)
+
+ raise ValueError("Sleep generator stopped yielding sleep values.")
+
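+# Example (illustrative sketch): calling the low-level helper directly with a
+# nullary target. ``do_request`` and ``payload`` are placeholders.
+#
+#   from google.api_core.retry import if_transient_error
+#
+#   target = functools.partial(do_request, payload)
+#   result = retry_target(
+#       target,
+#       predicate=if_transient_error,
+#       sleep_generator=exponential_sleep_generator(1.0, 10.0),
+#       timeout=30.0,
+#   )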
+
+class Retry(_BaseRetry):
+ """Exponential retry decorator for unary synchronous RPCs.
+
+ This class is a decorator used to add retry or polling behavior to an RPC
+ call.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ There are two important concepts that retry/polling behavior may operate on,
+ Deadline and Timeout, which need to be properly defined for the correct
+ usage of this class and the rest of the library.
+
+ Deadline: a fixed point in time by which a certain operation must
+ terminate. For example, if a certain operation has a deadline
+ "2022-10-18T23:30:52.123Z" it must terminate (successfully or with an
+ error) by that time, regardless of when it was started or whether it
+ was started at all.
+
+ Timeout: the maximum duration of time after which a certain operation
+ must terminate (successfully or with an error). The countdown begins right
+ after an operation was started. For example, if an operation was started at
+ 09:24:00 with timeout of 75 seconds, it must terminate no later than
+ 09:25:15.
+
+    Unfortunately, in the past this class (and the api-core library as a whole) did
+    not properly distinguish the concepts of "timeout" and "deadline", and the
+    ``deadline`` parameter has meant ``timeout``. That is why
+ ``deadline`` has been deprecated and ``timeout`` should be used instead. If the
+ ``deadline`` parameter is set, it will override the ``timeout`` parameter.
+ In other words, ``retry.deadline`` should be treated as just a deprecated alias for
+ ``retry.timeout``.
+
+    Said another way, it is safe to assume that this class and the rest of this
+    library operate in terms of timeouts (not deadlines) unless the usage of
+    deadline semantics is explicitly noted.
+
+ It is also important to
+ understand the three most common applications of the Timeout concept in the
+ context of this library.
+
+ Usually the generic Timeout term may stand for one of the following actual
+ timeouts: RPC Timeout, Retry Timeout, or Polling Timeout.
+
+ RPC Timeout: a value supplied by the client to the server so
+ that the server side knows the maximum amount of time it is expected to
+ spend handling that specific RPC. For example, in the case of gRPC transport,
+ RPC Timeout is represented by setting "grpc-timeout" header in the HTTP2
+ request. The `timeout` property of this class normally never represents the
+ RPC Timeout as it is handled separately by the ``google.api_core.timeout``
+ module of this library.
+
+ Retry Timeout: this is the most common meaning of the ``timeout`` property
+ of this class, and defines how long a certain RPC may be retried in case
+ the server returns an error.
+
+ Polling Timeout: defines how long the
+    client side is allowed to call the polling RPC repeatedly to check the status of a
+ long-running operation. Each polling RPC is
+ expected to succeed (its errors are supposed to be handled by the retry
+ logic). The decision as to whether a new polling attempt needs to be made is based
+ not on the RPC status code but on the status of the returned
+    operation. In other words: we will poll a long-running operation until
+ the operation is done or the polling timeout expires. Each poll will inform us of
+ the status of the operation. The poll consists of an RPC to the server that may
+ itself be retried as per the poll-specific retry settings in case of errors. The
+ operation-level retry settings do NOT apply to polling-RPC retries.
+
+ With the actual timeout types being defined above, the client libraries
+ often refer to just Timeout without clarifying which type specifically
+ that is. In that case the actual timeout type (sometimes also referred to as
+ Logical Timeout) can be determined from the context. If it is a unary rpc
+ call (i.e. a regular one) Timeout usually stands for the RPC Timeout (if
+ provided directly as a standalone value) or Retry Timeout (if provided as
+ ``retry.timeout`` property of the unary RPC's retry config). For
+ ``Operation`` or ``PollingFuture`` in general Timeout stands for
+ Polling Timeout.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+ initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+ maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ timeout (Optional[float]): How long to keep retrying, in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Callable[Exception]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+        deadline (float): DEPRECATED: use ``timeout`` instead. For backward
+ compatibility, if specified it will override the ``timeout`` parameter.
+ """
+
+ def __call__(
+ self,
+ func: Callable[_P, _R],
+ on_error: Callable[[Exception], Any] | None = None,
+ ) -> Callable[_P, _R]:
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable to add retry behavior to.
+ on_error (Optional[Callable[Exception]]): If given, the
+ on_error callback will be called with each retryable exception
+ raised by the wrapped function. Any error raised by this
+ function will *not* be caught. If on_error was specified in the
+ constructor, this value will be ignored.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R:
+ """A wrapper that calls target function with retry."""
+ target = functools.partial(func, *args, **kwargs)
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return retry_target(
+ target,
+ self._predicate,
+ sleep_generator,
+ timeout=self._timeout,
+ on_error=on_error,
+ )
+
+ return retry_wrapped_func
diff --git a/Lib/site-packages/google/api_core/retry/retry_unary_async.py b/Lib/site-packages/google/api_core/retry/retry_unary_async.py
new file mode 100644
index 0000000..3bdf6c7
--- /dev/null
+++ b/Lib/site-packages/google/api_core/retry/retry_unary_async.py
@@ -0,0 +1,238 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for retrying coroutine functions with exponential back-off.
+
+The :class:`AsyncRetry` decorator shares most functionality and behavior with
+:class:`Retry`, but supports coroutine functions. Please refer to description
+of :class:`Retry` for more details.
+
+By default, this decorator will retry transient
+API errors (see :func:`if_transient_error`). For example:
+
+.. code-block:: python
+
+ @retry_async.AsyncRetry()
+ async def call_flaky_rpc():
+ return await client.flaky_rpc()
+
+ # Will retry flaky_rpc() if it raises transient API errors.
+ result = await call_flaky_rpc()
+
+You can pass a custom predicate to retry on different exceptions, such as
+waiting for an eventually consistent item to be available:
+
+.. code-block:: python
+
+ @retry_async.AsyncRetry(predicate=retry_async.if_exception_type(exceptions.NotFound))
+ async def check_if_exists():
+ return await client.does_thing_exist()
+
+ is_available = await check_if_exists()
+
+Some client library methods apply retry automatically. These methods can accept
+a ``retry`` parameter that allows you to configure the behavior:
+
+.. code-block:: python
+
+ my_retry = retry_async.AsyncRetry(timeout=60)
+ result = await client.some_method(retry=my_retry)
+
+"""
+
+from __future__ import annotations
+
+import asyncio
+import time
+import functools
+from typing import (
+ Awaitable,
+ Any,
+ Callable,
+ Iterable,
+ TypeVar,
+ TYPE_CHECKING,
+)
+
+from google.api_core.retry.retry_base import _BaseRetry
+from google.api_core.retry.retry_base import _retry_error_helper
+from google.api_core.retry.retry_base import exponential_sleep_generator
+from google.api_core.retry.retry_base import build_retry_error
+from google.api_core.retry.retry_base import RetryFailureReason
+
+# for backwards compatibility, expose helpers in this module
+from google.api_core.retry.retry_base import if_exception_type # noqa
+from google.api_core.retry.retry_base import if_transient_error # noqa
+
+if TYPE_CHECKING:
+ import sys
+
+ if sys.version_info >= (3, 10):
+ from typing import ParamSpec
+ else:
+ from typing_extensions import ParamSpec
+
+ _P = ParamSpec("_P") # target function call parameters
+ _R = TypeVar("_R") # target function returned value
+
+_DEFAULT_INITIAL_DELAY = 1.0 # seconds
+_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
+_DEFAULT_DELAY_MULTIPLIER = 2.0
+_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
+_DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds
+
+
+async def retry_target(
+ target: Callable[_P, Awaitable[_R]],
+ predicate: Callable[[Exception], bool],
+ sleep_generator: Iterable[float],
+ timeout: float | None = None,
+ on_error: Callable[[Exception], None] | None = None,
+ exception_factory: Callable[
+ [list[Exception], RetryFailureReason, float | None],
+ tuple[Exception, Exception | None],
+ ] = build_retry_error,
+ **kwargs,
+):
+ """Await a coroutine and retry if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`Retry`.
+
+ Args:
+ target(Callable[[], Any]): The function to call and retry. This must be a
+ nullary function - apply arguments with `functools.partial`.
+ predicate (Callable[Exception]): A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator (Iterable[float]): An infinite iterator that determines
+ how long to sleep between retries.
+ timeout (Optional[float]): How long to keep retrying the target, in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Optional[Callable[Exception]]): If given, the on_error
+ callback will be called with each retryable exception raised by the
+ target. Any error raised by this function will *not* be caught.
+ exception_factory: A function that is called when the retryable reaches
+ a terminal failure state, used to construct an exception to be raised.
+ It takes a list of all exceptions encountered, a retry.RetryFailureReason
+ enum indicating the failure cause, and the original timeout value
+ as arguments. It should return a tuple of the exception to be raised,
+ along with the cause exception if any. The default implementation will raise
+ a RetryError on timeout, or the last exception encountered otherwise.
+ deadline (float): DEPRECATED use ``timeout`` instead. For backward
+ compatibility, if set it will override the ``timeout`` parameter.
+
+ Returns:
+ Any: the return value of the target function.
+
+ Raises:
+ ValueError: If the sleep generator stops yielding values.
+ Exception: a custom exception specified by the exception_factory if provided.
+ If no exception_factory is provided:
+ google.api_core.RetryError: If the timeout is exceeded while retrying.
+ Exception: If the target raises an error that isn't retryable.
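+
+    Example (an illustrative sketch; ``flaky_call`` is a placeholder
+    coroutine, not part of this module):
+
+    .. code-block:: python
+
+        result = await retry_target(
+            flaky_call,
+            predicate=if_transient_error,
+            sleep_generator=exponential_sleep_generator(1.0, 60.0),
+            timeout=120.0,
+        )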
+ """
+
+ timeout = kwargs.get("deadline", timeout)
+
+ deadline = time.monotonic() + timeout if timeout is not None else None
+ error_list: list[Exception] = []
+
+ for sleep in sleep_generator:
+ try:
+ return await target()
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ # defer to shared logic for handling errors
+ _retry_error_helper(
+ exc,
+ deadline,
+ sleep,
+ error_list,
+ predicate,
+ on_error,
+ exception_factory,
+ timeout,
+ )
+ # if exception not raised, sleep before next attempt
+ await asyncio.sleep(sleep)
+
+ raise ValueError("Sleep generator stopped yielding sleep values.")
+
+
+class AsyncRetry(_BaseRetry):
+ """Exponential retry decorator for async coroutines.
+
+ This class is a decorator used to add exponential back-off retry behavior
+ to an RPC call.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+ initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+ maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ timeout (Optional[float]): How long to keep retrying in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Optional[Callable[Exception]]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+ deadline (float): DEPRECATED use ``timeout`` instead. If set it will
+ override ``timeout`` parameter.
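+
+    Example (an illustrative sketch; ``client.flaky_rpc`` is a placeholder):
+
+    .. code-block:: python
+
+        retry = AsyncRetry(initial=0.1, maximum=10.0, multiplier=2.0, timeout=60.0)
+
+        @retry
+        async def call_rpc():
+            return await client.flaky_rpc()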
+ """
+
+ def __call__(
+ self,
+ func: Callable[..., Awaitable[_R]],
+ on_error: Callable[[Exception], Any] | None = None,
+ ) -> Callable[_P, Awaitable[_R]]:
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable or stream to add retry behavior to.
+ on_error (Optional[Callable[Exception]]): If given, the
+ on_error callback will be called with each retryable exception
+ raised by the wrapped function. Any error raised by this
+ function will *not* be caught. If on_error was specified in the
+ constructor, this value will be ignored.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ async def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R:
+ """A wrapper that calls target function with retry."""
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return await retry_target(
+ functools.partial(func, *args, **kwargs),
+ predicate=self._predicate,
+ sleep_generator=sleep_generator,
+ timeout=self._timeout,
+ on_error=on_error,
+ )
+
+ return retry_wrapped_func
diff --git a/Lib/site-packages/google/api_core/retry_async.py b/Lib/site-packages/google/api_core/retry_async.py
new file mode 100644
index 0000000..90a2d5a
--- /dev/null
+++ b/Lib/site-packages/google/api_core/retry_async.py
@@ -0,0 +1,34 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# The following imports are for backwards compatibility with https://github.com/googleapis/python-api-core/blob/4d7d2edee2c108d43deb151e6e0fdceb56b73275/google/api_core/retry_async.py
+#
+# TODO: Revert these imports on the next major version release (https://github.com/googleapis/python-api-core/issues/576)
+from google.api_core import datetime_helpers # noqa: F401
+from google.api_core import exceptions # noqa: F401
+from google.api_core.retry import exponential_sleep_generator # noqa: F401
+from google.api_core.retry import if_exception_type # noqa: F401
+from google.api_core.retry import if_transient_error # noqa: F401
+from google.api_core.retry.retry_unary_async import AsyncRetry
+from google.api_core.retry.retry_unary_async import retry_target
+
+__all__ = (
+ "AsyncRetry",
+ "datetime_helpers",
+ "exceptions",
+ "exponential_sleep_generator",
+ "if_exception_type",
+ "if_transient_error",
+ "retry_target",
+)
diff --git a/Lib/site-packages/google/api_core/timeout.py b/Lib/site-packages/google/api_core/timeout.py
new file mode 100644
index 0000000..868e3e9
--- /dev/null
+++ b/Lib/site-packages/google/api_core/timeout.py
@@ -0,0 +1,284 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Decorators for applying timeout arguments to functions.
+
+These decorators are used to wrap API methods to apply either a
+Deadline-dependent (recommended), constant (DEPRECATED) or exponential
+(DEPRECATED) timeout argument.
+
+For example, imagine an API method that can take a while to return results,
+such as one that might block until a resource is ready:
+
+.. code-block:: python
+
+ def is_thing_ready(timeout=None):
+        response = requests.get('https://example.com/is_thing_ready', timeout=timeout)
+ response.raise_for_status()
+ return response.json()
+
+This module allows a function like this to be wrapped so that timeouts are
+automatically determined, for example:
+
+.. code-block:: python
+
+ timeout_ = timeout.ExponentialTimeout()
+ is_thing_ready_with_timeout = timeout_(is_thing_ready)
+
+ for n in range(10):
+ try:
+            is_thing_ready_with_timeout()
+        except Exception:
+ pass
+
+In this example the first call to ``is_thing_ready`` will have a relatively
+small timeout (like 1 second). If the resource is available and the request
+completes quickly, the loop exits. But, if the resource isn't yet available
+and the request times out, it'll be retried - this time with a larger timeout.
+
+In the broader context these decorators are typically combined with
+:mod:`google.api_core.retry` to implement API methods with a signature that
+matches ``api_method(request, timeout=None, retry=None)``.
+"""
+
+from __future__ import unicode_literals
+
+import datetime
+import functools
+
+from google.api_core import datetime_helpers
+
+_DEFAULT_INITIAL_TIMEOUT = 5.0 # seconds
+_DEFAULT_MAXIMUM_TIMEOUT = 30.0 # seconds
+_DEFAULT_TIMEOUT_MULTIPLIER = 2.0
+# If specified, must be in seconds. If none, deadline is not used in the
+# timeout calculation.
+_DEFAULT_DEADLINE = None
+
+
+class TimeToDeadlineTimeout(object):
+ """A decorator that decreases timeout set for an RPC based on how much time
+ has left till its deadline. The deadline is calculated as
+ ``now + initial_timeout`` when this decorator is first called for an rpc.
+
+ In other words this decorator implements deadline semantics in terms of a
+ sequence of decreasing timeouts t0 > t1 > t2 ... tn >= 0.
+
+ Args:
+        timeout (Optional[float]): the timeout (in seconds) to apply to the
+            wrapped function. If `None`, the target function is expected to
+            never time out.
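+
+    Example (an illustrative sketch; ``get_info`` is a placeholder function
+    that accepts a ``timeout`` keyword argument):
+
+    .. code-block:: python
+
+        wrapped = TimeToDeadlineTimeout(timeout=10.0)(get_info)
+
+        wrapped()  # called with timeout=10.0
+        # ... roughly 4 seconds later, e.g. on a retry:
+        wrapped()  # called with timeout=6.0, the time left to the deadline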
+ """
+
+ def __init__(self, timeout=None, clock=datetime_helpers.utcnow):
+ self._timeout = timeout
+ self._clock = clock
+
+ def __call__(self, func):
+ """Apply the timeout decorator.
+
+ Args:
+ func (Callable): The function to apply the timeout argument to.
+ This function must accept a timeout keyword argument.
+
+ Returns:
+ Callable: The wrapped function.
+ """
+
+ first_attempt_timestamp = self._clock().timestamp()
+
+ @functools.wraps(func)
+ def func_with_timeout(*args, **kwargs):
+ """Wrapped function that adds timeout."""
+
+ remaining_timeout = self._timeout
+ if remaining_timeout is not None:
+ # All calculations are in seconds
+ now_timestamp = self._clock().timestamp()
+
+                # To avoid usage of nonlocal but still have round timeout
+                # numbers for the first attempt (in most cases the only
+                # attempt made for an RPC).
+ if now_timestamp - first_attempt_timestamp < 0.001:
+ now_timestamp = first_attempt_timestamp
+
+ time_since_first_attempt = now_timestamp - first_attempt_timestamp
+ # Avoid setting negative timeout
+ kwargs["timeout"] = max(0, self._timeout - time_since_first_attempt)
+
+ return func(*args, **kwargs)
+
+ return func_with_timeout
+
+ def __str__(self):
+ return "".format(self._timeout)
+
+
+class ConstantTimeout(object):
+ """A decorator that adds a constant timeout argument.
+
+ DEPRECATED: use ``TimeToDeadlineTimeout`` instead.
+
+ This is effectively equivalent to
+ ``functools.partial(func, timeout=timeout)``.
+
+ Args:
+        timeout (Optional[float]): the timeout (in seconds) to apply to the
+            wrapped function. If `None`, the target function is expected to
+            never time out.
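+
+    Example (an illustrative sketch; ``get_info`` is a placeholder):
+
+    .. code-block:: python
+
+        wrapped = ConstantTimeout(timeout=5.0)(get_info)
+        wrapped()  # every call is made with timeout=5.0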
+ """
+
+ def __init__(self, timeout=None):
+ self._timeout = timeout
+
+ def __call__(self, func):
+ """Apply the timeout decorator.
+
+ Args:
+ func (Callable): The function to apply the timeout argument to.
+ This function must accept a timeout keyword argument.
+
+ Returns:
+ Callable: The wrapped function.
+ """
+
+ @functools.wraps(func)
+ def func_with_timeout(*args, **kwargs):
+ """Wrapped function that adds timeout."""
+ kwargs["timeout"] = self._timeout
+ return func(*args, **kwargs)
+
+ return func_with_timeout
+
+ def __str__(self):
+ return "".format(self._timeout)
+
+
+def _exponential_timeout_generator(initial, maximum, multiplier, deadline):
+ """A generator that yields exponential timeout values.
+
+ Args:
+ initial (float): The initial timeout.
+ maximum (float): The maximum timeout.
+ multiplier (float): The multiplier applied to the timeout.
+ deadline (float): The overall deadline across all invocations.
+
+ Yields:
+ float: A timeout value.
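+
+    For instance (illustrative, with no deadline set):
+
+    .. code-block:: python
+
+        gen = _exponential_timeout_generator(1.0, 10.0, 2.0, None)
+        # yields 1.0, 2.0, 4.0, 8.0, 10.0, 10.0, ...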
+ """
+ if deadline is not None:
+ deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta(
+ seconds=deadline
+ )
+ else:
+ deadline_datetime = datetime.datetime.max
+
+ timeout = initial
+ while True:
+ now = datetime_helpers.utcnow()
+ yield min(
+ # The calculated timeout based on invocations.
+ timeout,
+ # The set maximum timeout.
+ maximum,
+ # The remaining time before the deadline is reached.
+ float((deadline_datetime - now).seconds),
+ )
+ timeout = timeout * multiplier
+
+
+class ExponentialTimeout(object):
+ """A decorator that adds an exponentially increasing timeout argument.
+
+ DEPRECATED: the concept of incrementing timeout exponentially has been
+ deprecated. Use ``TimeToDeadlineTimeout`` instead.
+
+    This is useful if a function is called multiple times. Each time the
+    function is called this decorator will calculate a new timeout parameter
+    based on the number of times the function has been called.
+
+ For example
+
+ .. code-block:: python
+
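+        # An illustrative sketch: ``get_info`` is a placeholder function that
+        # accepts a ``timeout`` keyword argument. With the default settings
+        # (initial=5.0, maximum=30.0, multiplier=2.0):
+        get_info_with_timeout = ExponentialTimeout()(get_info)
+
+        get_info_with_timeout()  # timeout=5.0
+        get_info_with_timeout()  # timeout=10.0
+        get_info_with_timeout()  # timeout=20.0, then capped at 30.0
+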
+ Args:
+ initial (float): The initial timeout to pass.
+ maximum (float): The maximum timeout for any one call.
+ multiplier (float): The multiplier applied to the timeout for each
+ invocation.
+ deadline (Optional[float]): The overall deadline across all
+ invocations. This is used to prevent a very large calculated
+ timeout from pushing the overall execution time over the deadline.
+ This is especially useful in conjunction with
+ :mod:`google.api_core.retry`. If ``None``, the timeouts will not
+ be adjusted to accommodate an overall deadline.
+ """
+
+ def __init__(
+ self,
+ initial=_DEFAULT_INITIAL_TIMEOUT,
+ maximum=_DEFAULT_MAXIMUM_TIMEOUT,
+ multiplier=_DEFAULT_TIMEOUT_MULTIPLIER,
+ deadline=_DEFAULT_DEADLINE,
+ ):
+ self._initial = initial
+ self._maximum = maximum
+ self._multiplier = multiplier
+ self._deadline = deadline
+
+ def with_deadline(self, deadline):
+ """Return a copy of this timeout with the given deadline.
+
+ Args:
+ deadline (float): The overall deadline across all invocations.
+
+ Returns:
+ ExponentialTimeout: A new instance with the given deadline.
+ """
+ return ExponentialTimeout(
+ initial=self._initial,
+ maximum=self._maximum,
+ multiplier=self._multiplier,
+ deadline=deadline,
+ )
+
+ def __call__(self, func):
+ """Apply the timeout decorator.
+
+ Args:
+ func (Callable): The function to apply the timeout argument to.
+ This function must accept a timeout keyword argument.
+
+ Returns:
+ Callable: The wrapped function.
+ """
+ timeouts = _exponential_timeout_generator(
+ self._initial, self._maximum, self._multiplier, self._deadline
+ )
+
+ @functools.wraps(func)
+ def func_with_timeout(*args, **kwargs):
+ """Wrapped function that adds timeout."""
+ kwargs["timeout"] = next(timeouts)
+ return func(*args, **kwargs)
+
+ return func_with_timeout
+
+ def __str__(self):
+        return (
+            "<ExponentialTimeout initial={}, maximum={}, multiplier={}, "
+            "deadline={}>".format(
+                self._initial, self._maximum, self._multiplier, self._deadline
+            )
+        )
diff --git a/Lib/site-packages/google/api_core/version.py b/Lib/site-packages/google/api_core/version.py
new file mode 100644
index 0000000..422b383
--- /dev/null
+++ b/Lib/site-packages/google/api_core/version.py
@@ -0,0 +1,15 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "2.17.0"
diff --git a/Lib/site-packages/google/auth/__init__.py b/Lib/site-packages/google/auth/__init__.py
new file mode 100644
index 0000000..765bbd7
--- /dev/null
+++ b/Lib/site-packages/google/auth/__init__.py
@@ -0,0 +1,53 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Auth Library for Python."""
+
+import logging
+import sys
+import warnings
+
+from google.auth import version as google_auth_version
+from google.auth._default import (
+ default,
+ load_credentials_from_dict,
+ load_credentials_from_file,
+)
+
+
+__version__ = google_auth_version.__version__
+
+
+__all__ = ["default", "load_credentials_from_file", "load_credentials_from_dict"]
+
+
+class Python37DeprecationWarning(DeprecationWarning): # pragma: NO COVER
+ """
+ Deprecation warning raised when Python 3.7 runtime is detected.
+ Python 3.7 support will be dropped after January 1, 2024.
+ """
+
+ pass
+
+
+# Checks if the current runtime is Python 3.7.
+if sys.version_info.major == 3 and sys.version_info.minor == 7: # pragma: NO COVER
+ message = (
+ "After January 1, 2024, new releases of this library will drop support "
+ "for Python 3.7."
+ )
+ warnings.warn(message, Python37DeprecationWarning)
+
+# Set default logging handler to avoid "No handler found" warnings.
+logging.getLogger(__name__).addHandler(logging.NullHandler())
diff --git a/Lib/site-packages/google/auth/_cloud_sdk.py b/Lib/site-packages/google/auth/_cloud_sdk.py
new file mode 100644
index 0000000..a944119
--- /dev/null
+++ b/Lib/site-packages/google/auth/_cloud_sdk.py
@@ -0,0 +1,153 @@
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for reading the Google Cloud SDK's configuration."""
+
+import os
+import subprocess
+
+from google.auth import _helpers
+from google.auth import environment_vars
+from google.auth import exceptions
+
+
+# The ~/.config subdirectory containing gcloud credentials.
+_CONFIG_DIRECTORY = "gcloud"
+# Windows systems store config at %APPDATA%\gcloud
+_WINDOWS_CONFIG_ROOT_ENV_VAR = "APPDATA"
+# The name of the file in the Cloud SDK config that contains default
+# credentials.
+_CREDENTIALS_FILENAME = "application_default_credentials.json"
+# The name of the Cloud SDK shell script
+_CLOUD_SDK_POSIX_COMMAND = "gcloud"
+_CLOUD_SDK_WINDOWS_COMMAND = "gcloud.cmd"
+# The command to get the Cloud SDK configuration
+_CLOUD_SDK_CONFIG_GET_PROJECT_COMMAND = ("config", "get", "project")
+# The command to get google user access token
+_CLOUD_SDK_USER_ACCESS_TOKEN_COMMAND = ("auth", "print-access-token")
+# Cloud SDK's application-default client ID
+CLOUD_SDK_CLIENT_ID = (
+ "764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com"
+)
+
+
+def get_config_path():
+ """Returns the absolute path the the Cloud SDK's configuration directory.
+
+ Returns:
+ str: The Cloud SDK config path.
+ """
+ # If the path is explicitly set, return that.
+ try:
+ return os.environ[environment_vars.CLOUD_SDK_CONFIG_DIR]
+ except KeyError:
+ pass
+
+ # Non-windows systems store this at ~/.config/gcloud
+ if os.name != "nt":
+ return os.path.join(os.path.expanduser("~"), ".config", _CONFIG_DIRECTORY)
+ # Windows systems store config at %APPDATA%\gcloud
+ else:
+ try:
+ return os.path.join(
+ os.environ[_WINDOWS_CONFIG_ROOT_ENV_VAR], _CONFIG_DIRECTORY
+ )
+ except KeyError:
+ # This should never happen unless someone is really
+ # messing with things, but we'll cover the case anyway.
+ drive = os.environ.get("SystemDrive", "C:")
+ return os.path.join(drive, "\\", _CONFIG_DIRECTORY)
+
+
+def get_application_default_credentials_path():
+ """Gets the path to the application default credentials file.
+
+ The path may or may not exist.
+
+ Returns:
+ str: The full path to application default credentials.
+ """
+ config_path = get_config_path()
+ return os.path.join(config_path, _CREDENTIALS_FILENAME)
+
+
+def _run_subprocess_ignore_stderr(command):
+ """ Return subprocess.check_output with the given command and ignores stderr."""
+ with open(os.devnull, "w") as devnull:
+ output = subprocess.check_output(command, stderr=devnull)
+ return output
+
+
+def get_project_id():
+ """Gets the project ID from the Cloud SDK.
+
+ Returns:
+ Optional[str]: The project ID.
+ """
+ if os.name == "nt":
+ command = _CLOUD_SDK_WINDOWS_COMMAND
+ else:
+ command = _CLOUD_SDK_POSIX_COMMAND
+
+ try:
+ # Ignore the stderr coming from gcloud, so it won't be mixed into the output.
+ # https://github.com/googleapis/google-auth-library-python/issues/673
+ project = _run_subprocess_ignore_stderr(
+ (command,) + _CLOUD_SDK_CONFIG_GET_PROJECT_COMMAND
+ )
+
+ # Turn bytes into a string and remove "\n"
+ project = _helpers.from_bytes(project).strip()
+ return project if project else None
+ except (subprocess.CalledProcessError, OSError, IOError):
+ return None
+
+
+def get_auth_access_token(account=None):
+ """Load user access token with the ``gcloud auth print-access-token`` command.
+
+ Args:
+ account (Optional[str]): Account to get the access token for. If not
+ specified, the current active account will be used.
+
+ Returns:
+ str: The user access token.
+
+ Raises:
+ google.auth.exceptions.UserAccessTokenError: if failed to get access
+ token from gcloud.
+ """
+ if os.name == "nt":
+ command = _CLOUD_SDK_WINDOWS_COMMAND
+ else:
+ command = _CLOUD_SDK_POSIX_COMMAND
+
+ try:
+ if account:
+ command = (
+ (command,)
+ + _CLOUD_SDK_USER_ACCESS_TOKEN_COMMAND
+ + ("--account=" + account,)
+ )
+ else:
+ command = (command,) + _CLOUD_SDK_USER_ACCESS_TOKEN_COMMAND
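+            # e.g. ("gcloud", "auth", "print-access-token") (illustrative)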
+
+ access_token = subprocess.check_output(command, stderr=subprocess.STDOUT)
+ # remove the trailing "\n"
+ return access_token.decode("utf-8").strip()
+ except (subprocess.CalledProcessError, OSError, IOError) as caught_exc:
+ new_exc = exceptions.UserAccessTokenError(
+ "Failed to obtain access token", caught_exc
+ )
+ raise new_exc from caught_exc
diff --git a/Lib/site-packages/google/auth/_credentials_async.py b/Lib/site-packages/google/auth/_credentials_async.py
new file mode 100644
index 0000000..760758d
--- /dev/null
+++ b/Lib/site-packages/google/auth/_credentials_async.py
@@ -0,0 +1,171 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Interfaces for credentials."""
+
+import abc
+import inspect
+
+from google.auth import credentials
+
+
+class Credentials(credentials.Credentials, metaclass=abc.ABCMeta):
+ """Async inherited credentials class from google.auth.credentials.
+ The added functionality is the before_request call which requires
+ async/await syntax.
+ All credentials have a :attr:`token` that is used for authentication and
+ may also optionally set an :attr:`expiry` to indicate when the token will
+ no longer be valid.
+
+ Most credentials will be :attr:`invalid` until :meth:`refresh` is called.
+ Credentials can do this automatically before the first HTTP request in
+ :meth:`before_request`.
+
+ Although the token and expiration will change as the credentials are
+    :meth:`refreshed <refresh>` and used, credentials should be considered
+ immutable. Various credentials will accept configuration such as private
+ keys, scopes, and other options. These options are not changeable after
+ construction. Some classes will provide mechanisms to copy the credentials
+ with modifications such as :meth:`ScopedCredentials.with_scopes`.
+ """
+
+ async def before_request(self, request, method, url, headers):
+ """Performs credential-specific before request logic.
+
+ Refreshes the credentials if necessary, then calls :meth:`apply` to
+ apply the token to the authentication header.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ method (str): The request's HTTP method or the RPC method being
+ invoked.
+ url (str): The request's URI or the RPC service's URI.
+ headers (Mapping): The request's headers.
+ """
+ # pylint: disable=unused-argument
+ # (Subclasses may use these arguments to ascertain information about
+ # the http request.)
+
+ if not self.valid:
+ if inspect.iscoroutinefunction(self.refresh):
+ await self.refresh(request)
+ else:
+ self.refresh(request)
+ self.apply(headers)
+
+
+class CredentialsWithQuotaProject(credentials.CredentialsWithQuotaProject):
+ """Abstract base for credentials supporting ``with_quota_project`` factory"""
+
+
+class AnonymousCredentials(credentials.AnonymousCredentials, Credentials):
+ """Credentials that do not provide any authentication information.
+
+ These are useful in the case of services that support anonymous access or
+ local service emulators that do not use credentials. This class inherits
+    from the sync anonymous credentials implementation and exists so that
+    anonymous credentials can still be used when the async credentials flow
+    is initialized.
+ """
+
+
+class ReadOnlyScoped(credentials.ReadOnlyScoped, metaclass=abc.ABCMeta):
+ """Interface for credentials whose scopes can be queried.
+
+ OAuth 2.0-based credentials allow limiting access using scopes as described
+ in `RFC6749 Section 3.3`_.
+    If a credential class implements this interface then the credentials
+    use scopes in their implementation.
+
+ Some credentials require scopes in order to obtain a token. You can check
+ if scoping is necessary with :attr:`requires_scopes`::
+
+ if credentials.requires_scopes:
+ # Scoping is required.
+ credentials = _credentials_async.with_scopes(scopes=['one', 'two'])
+
+ Credentials that require scopes must either be constructed with scopes::
+
+ credentials = SomeScopedCredentials(scopes=['one', 'two'])
+
+ Or must copy an existing instance using :meth:`with_scopes`::
+
+ scoped_credentials = _credentials_async.with_scopes(scopes=['one', 'two'])
+
+ Some credentials have scopes but do not allow or require scopes to be set,
+ these credentials can be used as-is.
+
+ .. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
+ """
+
+
+class Scoped(credentials.Scoped):
+ """Interface for credentials whose scopes can be replaced while copying.
+
+ OAuth 2.0-based credentials allow limiting access using scopes as described
+ in `RFC6749 Section 3.3`_.
+    If a credential class implements this interface then the credentials
+    use scopes in their implementation.
+
+ Some credentials require scopes in order to obtain a token. You can check
+ if scoping is necessary with :attr:`requires_scopes`::
+
+ if credentials.requires_scopes:
+ # Scoping is required.
+ credentials = _credentials_async.create_scoped(['one', 'two'])
+
+ Credentials that require scopes must either be constructed with scopes::
+
+ credentials = SomeScopedCredentials(scopes=['one', 'two'])
+
+ Or must copy an existing instance using :meth:`with_scopes`::
+
+ scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])
+
+ Some credentials have scopes but do not allow or require scopes to be set,
+ these credentials can be used as-is.
+
+ .. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
+ """
+
+
+def with_scopes_if_required(credentials, scopes):
+ """Creates a copy of the credentials with scopes if scoping is required.
+
+ This helper function is useful when you do not know (or care to know) the
+ specific type of credentials you are using (such as when you use
+ :func:`google.auth.default`). This function will call
+ :meth:`Scoped.with_scopes` if the credentials are scoped credentials and if
+ the credentials require scoping. Otherwise, it will return the credentials
+ as-is.
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials to
+ scope if necessary.
+ scopes (Sequence[str]): The list of scopes to use.
+
+ Returns:
+ google.auth._credentials_async.Credentials: Either a new set of scoped
+ credentials, or the passed in credentials instance if no scoping
+ was required.
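+
+    Example (an illustrative sketch):
+
+    .. code-block:: python
+
+        scoped = with_scopes_if_required(
+            credentials, ['https://www.googleapis.com/auth/cloud-platform']
+        )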
+ """
+ if isinstance(credentials, Scoped) and credentials.requires_scopes:
+ return credentials.with_scopes(scopes)
+ else:
+ return credentials
+
+
+class Signing(credentials.Signing, metaclass=abc.ABCMeta):
+ """Interface for credentials that can cryptographically sign messages."""
diff --git a/Lib/site-packages/google/auth/_default.py b/Lib/site-packages/google/auth/_default.py
new file mode 100644
index 0000000..63009df
--- /dev/null
+++ b/Lib/site-packages/google/auth/_default.py
@@ -0,0 +1,691 @@
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Application default credentials.
+
+Implements application default credentials and project ID detection.
+"""
+
+import io
+import json
+import logging
+import os
+import warnings
+
+from google.auth import environment_vars
+from google.auth import exceptions
+import google.auth.transport._http_client
+
+_LOGGER = logging.getLogger(__name__)
+
+# Valid types accepted for file-based credentials.
+_AUTHORIZED_USER_TYPE = "authorized_user"
+_SERVICE_ACCOUNT_TYPE = "service_account"
+_EXTERNAL_ACCOUNT_TYPE = "external_account"
+_EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = "external_account_authorized_user"
+_IMPERSONATED_SERVICE_ACCOUNT_TYPE = "impersonated_service_account"
+_GDCH_SERVICE_ACCOUNT_TYPE = "gdch_service_account"
+_VALID_TYPES = (
+ _AUTHORIZED_USER_TYPE,
+ _SERVICE_ACCOUNT_TYPE,
+ _EXTERNAL_ACCOUNT_TYPE,
+ _EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE,
+ _IMPERSONATED_SERVICE_ACCOUNT_TYPE,
+ _GDCH_SERVICE_ACCOUNT_TYPE,
+)
+
+# Help message when no credentials can be found.
+_CLOUD_SDK_MISSING_CREDENTIALS = """\
+Your default credentials were not found. To set up Application Default Credentials, \
+see https://cloud.google.com/docs/authentication/external/set-up-adc for more information.\
+"""
+
+# Warning when using Cloud SDK user credentials
+_CLOUD_SDK_CREDENTIALS_WARNING = """\
+Your application has authenticated using end user credentials from Google \
+Cloud SDK without a quota project. You might receive a "quota exceeded" \
+or "API not enabled" error. See the following page for troubleshooting: \
+https://cloud.google.com/docs/authentication/adc-troubleshooting/user-creds. \
+"""
+
+# The subject token type used for AWS external_account credentials.
+_AWS_SUBJECT_TOKEN_TYPE = "urn:ietf:params:aws:token-type:aws4_request"
+
+
+def _warn_about_problematic_credentials(credentials):
+ """Determines if the credentials are problematic.
+
+ Credentials from the Cloud SDK that are associated with Cloud SDK's project
+ are problematic because they may not have APIs enabled and have limited
+ quota. If this is the case, warn about it.
+ """
+ from google.auth import _cloud_sdk
+
+ if credentials.client_id == _cloud_sdk.CLOUD_SDK_CLIENT_ID:
+ warnings.warn(_CLOUD_SDK_CREDENTIALS_WARNING)
+
+
+def load_credentials_from_file(
+ filename, scopes=None, default_scopes=None, quota_project_id=None, request=None
+):
+ """Loads Google credentials from a file.
+
+ The credentials file must be a service account key, stored authorized
+ user credentials, external account credentials, or impersonated service
+ account credentials.
+
+ Args:
+ filename (str): The full path to the credentials file.
+ scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
+ specified, the credentials will automatically be scoped if
+ necessary
+ default_scopes (Optional[Sequence[str]]): Default scopes passed by a
+ Google client library. Use 'scopes' for user-defined scopes.
+ quota_project_id (Optional[str]): The project ID used for
+ quota and billing.
+ request (Optional[google.auth.transport.Request]): An object used to make
+ HTTP requests. This is used to determine the associated project ID
+ for a workload identity pool resource (external account credentials).
+ If not specified, then it will use a
+ google.auth.transport.requests.Request client to make requests.
+
+ Returns:
+ Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
+ credentials and the project ID. Authorized user credentials do not
+ have the project ID information. External account credentials project
+ IDs may not always be determined.
+
+ Raises:
+ google.auth.exceptions.DefaultCredentialsError: if the file is in the
+ wrong format or is missing.
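+
+    Example (an illustrative sketch; the path is a placeholder):
+
+    .. code-block:: python
+
+        credentials, project_id = load_credentials_from_file(
+            '/path/to/service_account.json',
+            scopes=['https://www.googleapis.com/auth/cloud-platform'],
+        )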
+ """
+ if not os.path.exists(filename):
+ raise exceptions.DefaultCredentialsError(
+ "File {} was not found.".format(filename)
+ )
+
+ with io.open(filename, "r") as file_obj:
+ try:
+ info = json.load(file_obj)
+ except ValueError as caught_exc:
+ new_exc = exceptions.DefaultCredentialsError(
+ "File {} is not a valid json file.".format(filename), caught_exc
+ )
+ raise new_exc from caught_exc
+ return _load_credentials_from_info(
+ filename, info, scopes, default_scopes, quota_project_id, request
+ )
+
+
+def load_credentials_from_dict(
+ info, scopes=None, default_scopes=None, quota_project_id=None, request=None
+):
+ """Loads Google credentials from a dict.
+
+ The credentials file must be a service account key, stored authorized
+ user credentials, external account credentials, or impersonated service
+ account credentials.
+
+ Args:
+ info (Dict[str, Any]): A dict object containing the credentials
+ scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
+ specified, the credentials will automatically be scoped if
+ necessary
+ default_scopes (Optional[Sequence[str]]): Default scopes passed by a
+ Google client library. Use 'scopes' for user-defined scopes.
+ quota_project_id (Optional[str]): The project ID used for
+ quota and billing.
+ request (Optional[google.auth.transport.Request]): An object used to make
+ HTTP requests. This is used to determine the associated project ID
+ for a workload identity pool resource (external account credentials).
+ If not specified, then it will use a
+ google.auth.transport.requests.Request client to make requests.
+
+ Returns:
+ Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
+ credentials and the project ID. Authorized user credentials do not
+ have the project ID information. External account credentials project
+ IDs may not always be determined.
+
+ Raises:
+ google.auth.exceptions.DefaultCredentialsError: if the file is in the
+ wrong format or is missing.
+ """
+ if not isinstance(info, dict):
+ raise exceptions.DefaultCredentialsError(
+ "info object was of type {} but dict type was expected.".format(type(info))
+ )
+
+ return _load_credentials_from_info(
+ "dict object", info, scopes, default_scopes, quota_project_id, request
+ )
+
+
+def _load_credentials_from_info(
+ filename, info, scopes, default_scopes, quota_project_id, request
+):
+ from google.auth.credentials import CredentialsWithQuotaProject
+
+ credential_type = info.get("type")
+
+ if credential_type == _AUTHORIZED_USER_TYPE:
+ credentials, project_id = _get_authorized_user_credentials(
+ filename, info, scopes
+ )
+
+ elif credential_type == _SERVICE_ACCOUNT_TYPE:
+ credentials, project_id = _get_service_account_credentials(
+ filename, info, scopes, default_scopes
+ )
+
+ elif credential_type == _EXTERNAL_ACCOUNT_TYPE:
+ credentials, project_id = _get_external_account_credentials(
+ info,
+ filename,
+ scopes=scopes,
+ default_scopes=default_scopes,
+ request=request,
+ )
+
+ elif credential_type == _EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE:
+ credentials, project_id = _get_external_account_authorized_user_credentials(
+ filename, info, request
+ )
+
+ elif credential_type == _IMPERSONATED_SERVICE_ACCOUNT_TYPE:
+ credentials, project_id = _get_impersonated_service_account_credentials(
+ filename, info, scopes
+ )
+ elif credential_type == _GDCH_SERVICE_ACCOUNT_TYPE:
+ credentials, project_id = _get_gdch_service_account_credentials(filename, info)
+ else:
+ raise exceptions.DefaultCredentialsError(
+ "The file {file} does not have a valid type. "
+ "Type is {type}, expected one of {valid_types}.".format(
+ file=filename, type=credential_type, valid_types=_VALID_TYPES
+ )
+ )
+ if isinstance(credentials, CredentialsWithQuotaProject):
+ credentials = _apply_quota_project_id(credentials, quota_project_id)
+ return credentials, project_id
+
+
+def _get_gcloud_sdk_credentials(quota_project_id=None):
+ """Gets the credentials and project ID from the Cloud SDK."""
+ from google.auth import _cloud_sdk
+
+ _LOGGER.debug("Checking Cloud SDK credentials as part of auth process...")
+
+ # Check if application default credentials exist.
+ credentials_filename = _cloud_sdk.get_application_default_credentials_path()
+
+ if not os.path.isfile(credentials_filename):
+ _LOGGER.debug("Cloud SDK credentials not found on disk; not using them")
+ return None, None
+
+ credentials, project_id = load_credentials_from_file(
+ credentials_filename, quota_project_id=quota_project_id
+ )
+
+ if not project_id:
+ project_id = _cloud_sdk.get_project_id()
+
+ return credentials, project_id
+
+
+def _get_explicit_environ_credentials(quota_project_id=None):
+ """Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
+ variable."""
+ from google.auth import _cloud_sdk
+
+ cloud_sdk_adc_path = _cloud_sdk.get_application_default_credentials_path()
+ explicit_file = os.environ.get(environment_vars.CREDENTIALS)
+
+ _LOGGER.debug(
+ "Checking %s for explicit credentials as part of auth process...", explicit_file
+ )
+
+ if explicit_file is not None and explicit_file == cloud_sdk_adc_path:
+ # Cloud sdk flow calls gcloud to fetch project id, so if the explicit
+ # file path is cloud sdk credentials path, then we should fall back
+ # to cloud sdk flow, otherwise project id cannot be obtained.
+ _LOGGER.debug(
+ "Explicit credentials path %s is the same as Cloud SDK credentials path, fall back to Cloud SDK credentials flow...",
+ explicit_file,
+ )
+ return _get_gcloud_sdk_credentials(quota_project_id=quota_project_id)
+
+ if explicit_file is not None:
+ credentials, project_id = load_credentials_from_file(
+ os.environ[environment_vars.CREDENTIALS], quota_project_id=quota_project_id
+ )
+
+ return credentials, project_id
+
+ else:
+ return None, None
+
+
+def _get_gae_credentials():
+ """Gets Google App Engine App Identity credentials and project ID."""
+ # If not GAE gen1, prefer the metadata service even if the GAE APIs are
+ # available as per https://google.aip.dev/auth/4115.
+ if os.environ.get(environment_vars.LEGACY_APPENGINE_RUNTIME) != "python27":
+ return None, None
+
+ # While this library is normally bundled with app_engine, there are
+ # some cases where it's not available, so we tolerate ImportError.
+ try:
+ _LOGGER.debug("Checking for App Engine runtime as part of auth process...")
+ import google.auth.app_engine as app_engine
+ except ImportError:
+ _LOGGER.warning("Import of App Engine auth library failed.")
+ return None, None
+
+ try:
+ credentials = app_engine.Credentials()
+ project_id = app_engine.get_project_id()
+ return credentials, project_id
+ except EnvironmentError:
+ _LOGGER.debug(
+ "No App Engine library was found so cannot authentication via App Engine Identity Credentials."
+ )
+ return None, None
+
+
+def _get_gce_credentials(request=None, quota_project_id=None):
+ """Gets credentials and project ID from the GCE Metadata Service."""
+ # Ping requires a transport, but we want application default credentials
+ # to require no arguments. So, we'll use the _http_client transport which
+ # uses http.client. This is only acceptable because the metadata server
+ # doesn't do SSL and never requires proxies.
+
+ # While this library is normally bundled with compute_engine, there are
+ # some cases where it's not available, so we tolerate ImportError.
+ try:
+ from google.auth import compute_engine
+ from google.auth.compute_engine import _metadata
+ except ImportError:
+ _LOGGER.warning("Import of Compute Engine auth library failed.")
+ return None, None
+
+ if request is None:
+ request = google.auth.transport._http_client.Request()
+
+ if _metadata.is_on_gce(request=request):
+ # Get the project ID.
+ try:
+ project_id = _metadata.get_project_id(request=request)
+ except exceptions.TransportError:
+ project_id = None
+
+ cred = compute_engine.Credentials()
+ cred = _apply_quota_project_id(cred, quota_project_id)
+
+ return cred, project_id
+ else:
+ _LOGGER.warning(
+ "Authentication failed using Compute Engine authentication due to unavailable metadata server."
+ )
+ return None, None
+
+
+def _get_external_account_credentials(
+ info, filename, scopes=None, default_scopes=None, request=None
+):
+ """Loads external account Credentials from the parsed external account info.
+
+ The credentials information must correspond to a supported external account
+ credentials.
+
+ Args:
+ info (Mapping[str, str]): The external account info in Google format.
+ filename (str): The full path to the credentials file.
+ scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
+ specified, the credentials will automatically be scoped if
+ necessary.
+ default_scopes (Optional[Sequence[str]]): Default scopes passed by a
+ Google client library. Use 'scopes' for user-defined scopes.
+ request (Optional[google.auth.transport.Request]): An object used to make
+ HTTP requests. This is used to determine the associated project ID
+ for a workload identity pool resource (external account credentials).
+ If not specified, then it will use a
+ google.auth.transport.requests.Request client to make requests.
+
+ Returns:
+ Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
+ credentials and the project ID. External account credentials project
+ IDs may not always be determined.
+
+ Raises:
+ google.auth.exceptions.DefaultCredentialsError: if the info dictionary
+ is in the wrong format or is missing required information.
+ """
+ # There are currently 3 types of external_account credentials.
+ if info.get("subject_token_type") == _AWS_SUBJECT_TOKEN_TYPE:
+        # Check if the configuration corresponds to AWS credentials.
+ from google.auth import aws
+
+ credentials = aws.Credentials.from_info(
+ info, scopes=scopes, default_scopes=default_scopes
+ )
+ elif (
+ info.get("credential_source") is not None
+ and info.get("credential_source").get("executable") is not None
+ ):
+ from google.auth import pluggable
+
+ credentials = pluggable.Credentials.from_info(
+ info, scopes=scopes, default_scopes=default_scopes
+ )
+ else:
+ try:
+ # Check if configuration corresponds to an Identity Pool credentials.
+ from google.auth import identity_pool
+
+ credentials = identity_pool.Credentials.from_info(
+ info, scopes=scopes, default_scopes=default_scopes
+ )
+ except ValueError:
+ # If the configuration is invalid or does not correspond to any
+ # supported external_account credentials, raise an error.
+ raise exceptions.DefaultCredentialsError(
+ "Failed to load external account credentials from {}".format(filename)
+ )
+ if request is None:
+ import google.auth.transport.requests
+
+ request = google.auth.transport.requests.Request()
+
+ return credentials, credentials.get_project_id(request=request)
+
+
+def _get_external_account_authorized_user_credentials(
+ filename, info, scopes=None, default_scopes=None, request=None
+):
+ try:
+ from google.auth import external_account_authorized_user
+
+ credentials = external_account_authorized_user.Credentials.from_info(info)
+ except ValueError:
+ raise exceptions.DefaultCredentialsError(
+ "Failed to load external account authorized user credentials from {}".format(
+ filename
+ )
+ )
+
+ return credentials, None
+
+
+def _get_authorized_user_credentials(filename, info, scopes=None):
+ from google.oauth2 import credentials
+
+ try:
+ credentials = credentials.Credentials.from_authorized_user_info(
+ info, scopes=scopes
+ )
+ except ValueError as caught_exc:
+ msg = "Failed to load authorized user credentials from {}".format(filename)
+ new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
+ raise new_exc from caught_exc
+ return credentials, None
+
+
+def _get_service_account_credentials(filename, info, scopes=None, default_scopes=None):
+ from google.oauth2 import service_account
+
+ try:
+ credentials = service_account.Credentials.from_service_account_info(
+ info, scopes=scopes, default_scopes=default_scopes
+ )
+ except ValueError as caught_exc:
+ msg = "Failed to load service account credentials from {}".format(filename)
+ new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
+ raise new_exc from caught_exc
+ return credentials, info.get("project_id")
+
+
+def _get_impersonated_service_account_credentials(filename, info, scopes):
+ from google.auth import impersonated_credentials
+
+ try:
+ source_credentials_info = info.get("source_credentials")
+ source_credentials_type = source_credentials_info.get("type")
+ if source_credentials_type == _AUTHORIZED_USER_TYPE:
+ source_credentials, _ = _get_authorized_user_credentials(
+ filename, source_credentials_info
+ )
+ elif source_credentials_type == _SERVICE_ACCOUNT_TYPE:
+ source_credentials, _ = _get_service_account_credentials(
+ filename, source_credentials_info
+ )
+ else:
+ raise exceptions.InvalidType(
+ "source credential of type {} is not supported.".format(
+ source_credentials_type
+ )
+ )
+ impersonation_url = info.get("service_account_impersonation_url")
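+        # The URL typically has the form (illustrative):
+        #   https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/
+        #   <target_principal>:generateAccessToken
+        # so the target principal is the segment between the final "/" and
+        # ":generateAccessToken".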
+ start_index = impersonation_url.rfind("/")
+ end_index = impersonation_url.find(":generateAccessToken")
+ if start_index == -1 or end_index == -1 or start_index > end_index:
+ raise exceptions.InvalidValue(
+ "Cannot extract target principal from {}".format(impersonation_url)
+ )
+ target_principal = impersonation_url[start_index + 1 : end_index]
+ delegates = info.get("delegates")
+ quota_project_id = info.get("quota_project_id")
+ credentials = impersonated_credentials.Credentials(
+ source_credentials,
+ target_principal,
+ scopes,
+ delegates,
+ quota_project_id=quota_project_id,
+ )
+ except ValueError as caught_exc:
+ msg = "Failed to load impersonated service account credentials from {}".format(
+ filename
+ )
+ new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
+ raise new_exc from caught_exc
+ return credentials, None
+
+
+def _get_gdch_service_account_credentials(filename, info):
+ from google.oauth2 import gdch_credentials
+
+ try:
+ credentials = gdch_credentials.ServiceAccountCredentials.from_service_account_info(
+ info
+ )
+ except ValueError as caught_exc:
+ msg = "Failed to load GDCH service account credentials from {}".format(filename)
+ new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
+ raise new_exc from caught_exc
+ return credentials, info.get("project")
+
+
+def get_api_key_credentials(key):
+ """Return credentials with the given API key."""
+ from google.auth import api_key
+
+ return api_key.Credentials(key)
+
+
+def _apply_quota_project_id(credentials, quota_project_id):
+ if quota_project_id:
+ credentials = credentials.with_quota_project(quota_project_id)
+ else:
+ credentials = credentials.with_quota_project_from_environment()
+
+ from google.oauth2 import credentials as authorized_user_credentials
+
+ if isinstance(credentials, authorized_user_credentials.Credentials) and (
+ not credentials.quota_project_id
+ ):
+ _warn_about_problematic_credentials(credentials)
+ return credentials
+
+
+def default(scopes=None, request=None, quota_project_id=None, default_scopes=None):
+ """Gets the default credentials for the current environment.
+
+ `Application Default Credentials`_ provides an easy way to obtain
+ credentials to call Google APIs for server-to-server or local applications.
+ This function acquires credentials from the environment in the following
+ order:
+
+ 1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
+ to the path of a valid service account JSON private key file, then it is
+ loaded and returned. The project ID returned is the project ID defined
+ in the service account file if available (some older files do not
+ contain project ID information).
+
+ If the environment variable is set to the path of a valid external
+ account JSON configuration file (workload identity federation), then the
+ configuration file is used to determine and retrieve the external
+ credentials from the current environment (AWS, Azure, etc).
+ These will then be exchanged for Google access tokens via the Google STS
+ endpoint.
+ The project ID returned in this case is the one corresponding to the
+ underlying workload identity pool resource if determinable.
+
+ If the environment variable is set to the path of a valid GDCH service
+ account JSON file (`Google Distributed Cloud Hosted`_), then a GDCH
+ credential will be returned. The project ID returned is the project
+ specified in the JSON file.
+ 2. If the `Google Cloud SDK`_ is installed and has application default
+ credentials set they are loaded and returned.
+
+ To enable application default credentials with the Cloud SDK run::
+
+ gcloud auth application-default login
+
+ If the Cloud SDK has an active project, the project ID is returned. The
+ active project can be set using::
+
+ gcloud config set project
+
+ 3. If the application is running in the `App Engine standard environment`_
+ (first generation) then the credentials and project ID from the
+ `App Identity Service`_ are used.
+ 4. If the application is running in `Compute Engine`_ or `Cloud Run`_ or
+ the `App Engine flexible environment`_ or the `App Engine standard
+ environment`_ (second generation) then the credentials and project ID
+ are obtained from the `Metadata Service`_.
+ 5. If no credentials are found,
+ :class:`~google.auth.exceptions.DefaultCredentialsError` will be raised.
+
+ .. _Application Default Credentials: https://developers.google.com\
+ /identity/protocols/application-default-credentials
+ .. _Google Cloud SDK: https://cloud.google.com/sdk
+ .. _App Engine standard environment: https://cloud.google.com/appengine
+ .. _App Identity Service: https://cloud.google.com/appengine/docs/python\
+ /appidentity/
+ .. _Compute Engine: https://cloud.google.com/compute
+ .. _App Engine flexible environment: https://cloud.google.com\
+ /appengine/flexible
+ .. _Metadata Service: https://cloud.google.com/compute/docs\
+ /storing-retrieving-metadata
+ .. _Cloud Run: https://cloud.google.com/run
+ .. _Google Distributed Cloud Hosted: https://cloud.google.com/blog/topics\
+ /hybrid-cloud/announcing-google-distributed-cloud-edge-and-hosted
+
+ Example::
+
+ import google.auth
+
+ credentials, project_id = google.auth.default()
+
+ Args:
+ scopes (Sequence[str]): The list of scopes for the credentials. If
+ specified, the credentials will automatically be scoped if
+ necessary.
+ request (Optional[google.auth.transport.Request]): An object used to make
+ HTTP requests. This is used to either detect whether the application
+ is running on Compute Engine or to determine the associated project
+ ID for a workload identity pool resource (external account
+ credentials). If not specified, then it will either use the standard
+ library http client to make requests for Compute Engine credentials
+ or a google.auth.transport.requests.Request client for external
+ account credentials.
+ quota_project_id (Optional[str]): The project ID used for
+ quota and billing.
+ default_scopes (Optional[Sequence[str]]): Default scopes passed by a
+ Google client library. Use 'scopes' for user-defined scopes.
+ Returns:
+ Tuple[~google.auth.credentials.Credentials, Optional[str]]:
+ the current environment's credentials and project ID. Project ID
+ may be None, which indicates that the Project ID could not be
+ ascertained from the environment.
+
+ Raises:
+ ~google.auth.exceptions.DefaultCredentialsError:
+ If no credentials were found, or if the credentials found were
+ invalid.
+ """
+ from google.auth.credentials import with_scopes_if_required
+ from google.auth.credentials import CredentialsWithQuotaProject
+
+ explicit_project_id = os.environ.get(
+ environment_vars.PROJECT, os.environ.get(environment_vars.LEGACY_PROJECT)
+ )
+
+ checkers = (
+ # Avoid passing scopes here to prevent passing scopes to user credentials.
+ # with_scopes_if_required() below will ensure scopes/default scopes are
+ # safely set on the returned credentials since requires_scopes will
+ # guard against setting scopes on user credentials.
+ lambda: _get_explicit_environ_credentials(quota_project_id=quota_project_id),
+ lambda: _get_gcloud_sdk_credentials(quota_project_id=quota_project_id),
+ _get_gae_credentials,
+ lambda: _get_gce_credentials(request, quota_project_id=quota_project_id),
+ )
+
+ for checker in checkers:
+ credentials, project_id = checker()
+ if credentials is not None:
+ credentials = with_scopes_if_required(
+ credentials, scopes, default_scopes=default_scopes
+ )
+
+ effective_project_id = explicit_project_id or project_id
+
+ # For external account credentials, scopes are required to determine
+ # the project ID. Try to get the project ID again if not yet
+ # determined.
+ if not effective_project_id and callable(
+ getattr(credentials, "get_project_id", None)
+ ):
+ if request is None:
+ import google.auth.transport.requests
+
+ request = google.auth.transport.requests.Request()
+ effective_project_id = credentials.get_project_id(request=request)
+
+ if quota_project_id and isinstance(
+ credentials, CredentialsWithQuotaProject
+ ):
+ credentials = credentials.with_quota_project(quota_project_id)
+
+ if not effective_project_id:
+ _LOGGER.warning(
+ "No project ID could be determined. Consider running "
+ "`gcloud config set project` or setting the %s "
+ "environment variable",
+ environment_vars.PROJECT,
+ )
+ return credentials, effective_project_id
+
+ raise exceptions.DefaultCredentialsError(_CLOUD_SDK_MISSING_CREDENTIALS)
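+
+
+# Illustrative usage (a sketch; assumes Application Default Credentials are
+# already configured in the environment, e.g. via
+# `gcloud auth application-default login` or GOOGLE_APPLICATION_CREDENTIALS):
+#
+#   import google.auth
+#
+#   credentials, project = google.auth.default(
+#       scopes=["https://www.googleapis.com/auth/cloud-platform"]
+#   )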
diff --git a/Lib/site-packages/google/auth/_default_async.py b/Lib/site-packages/google/auth/_default_async.py
new file mode 100644
index 0000000..2e53e20
--- /dev/null
+++ b/Lib/site-packages/google/auth/_default_async.py
@@ -0,0 +1,282 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Application default credentials.
+
+Implements application default credentials and project ID detection.
+"""
+
+import io
+import json
+import os
+
+from google.auth import _default
+from google.auth import environment_vars
+from google.auth import exceptions
+
+
+def load_credentials_from_file(filename, scopes=None, quota_project_id=None):
+ """Loads Google credentials from a file.
+
+ The credentials file must be a service account key or stored authorized
+ user credentials.
+
+ Args:
+ filename (str): The full path to the credentials file.
+ scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
+ specified, the credentials will automatically be scoped if
+            necessary.
+ quota_project_id (Optional[str]): The project ID used for
+ quota and billing.
+
+ Returns:
+ Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
+ credentials and the project ID. Authorized user credentials do not
+ have the project ID information.
+
+ Raises:
+ google.auth.exceptions.DefaultCredentialsError: if the file is in the
+ wrong format or is missing.
+ """
+ if not os.path.exists(filename):
+ raise exceptions.DefaultCredentialsError(
+ "File {} was not found.".format(filename)
+ )
+
+ with io.open(filename, "r") as file_obj:
+ try:
+ info = json.load(file_obj)
+ except ValueError as caught_exc:
+ new_exc = exceptions.DefaultCredentialsError(
+ "File {} is not a valid json file.".format(filename), caught_exc
+ )
+ raise new_exc from caught_exc
+
+ # The type key should indicate that the file is either a service account
+ # credentials file or an authorized user credentials file.
+ credential_type = info.get("type")
+
+ if credential_type == _default._AUTHORIZED_USER_TYPE:
+ from google.oauth2 import _credentials_async as credentials
+
+ try:
+ credentials = credentials.Credentials.from_authorized_user_info(
+ info, scopes=scopes
+ )
+ except ValueError as caught_exc:
+ msg = "Failed to load authorized user credentials from {}".format(filename)
+ new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
+ raise new_exc from caught_exc
+ if quota_project_id:
+ credentials = credentials.with_quota_project(quota_project_id)
+ if not credentials.quota_project_id:
+ _default._warn_about_problematic_credentials(credentials)
+ return credentials, None
+
+ elif credential_type == _default._SERVICE_ACCOUNT_TYPE:
+ from google.oauth2 import _service_account_async as service_account
+
+ try:
+ credentials = service_account.Credentials.from_service_account_info(
+ info, scopes=scopes
+ ).with_quota_project(quota_project_id)
+ except ValueError as caught_exc:
+ msg = "Failed to load service account credentials from {}".format(filename)
+ new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
+ raise new_exc from caught_exc
+ return credentials, info.get("project_id")
+
+ else:
+ raise exceptions.DefaultCredentialsError(
+ "The file {file} does not have a valid type. "
+ "Type is {type}, expected one of {valid_types}.".format(
+ file=filename, type=credential_type, valid_types=_default._VALID_TYPES
+ )
+ )
+
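+# Illustrative usage (a sketch; "key.json" is a hypothetical path to a
+# service account key or authorized user credentials file):
+#
+#   credentials, project_id = load_credentials_from_file(
+#       "key.json", scopes=["https://www.googleapis.com/auth/cloud-platform"]
+#   )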
+
+def _get_gcloud_sdk_credentials(quota_project_id=None):
+ """Gets the credentials and project ID from the Cloud SDK."""
+ from google.auth import _cloud_sdk
+
+ # Check if application default credentials exist.
+ credentials_filename = _cloud_sdk.get_application_default_credentials_path()
+
+ if not os.path.isfile(credentials_filename):
+ return None, None
+
+ credentials, project_id = load_credentials_from_file(
+ credentials_filename, quota_project_id=quota_project_id
+ )
+
+ if not project_id:
+ project_id = _cloud_sdk.get_project_id()
+
+ return credentials, project_id
+
+
+def _get_explicit_environ_credentials(quota_project_id=None):
+ """Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
+ variable."""
+ from google.auth import _cloud_sdk
+
+ cloud_sdk_adc_path = _cloud_sdk.get_application_default_credentials_path()
+ explicit_file = os.environ.get(environment_vars.CREDENTIALS)
+
+ if explicit_file is not None and explicit_file == cloud_sdk_adc_path:
+ # Cloud sdk flow calls gcloud to fetch project id, so if the explicit
+ # file path is cloud sdk credentials path, then we should fall back
+ # to cloud sdk flow, otherwise project id cannot be obtained.
+ return _get_gcloud_sdk_credentials(quota_project_id=quota_project_id)
+
+ if explicit_file is not None:
+ credentials, project_id = load_credentials_from_file(
+ os.environ[environment_vars.CREDENTIALS], quota_project_id=quota_project_id
+ )
+
+ return credentials, project_id
+
+ else:
+ return None, None
+
+
+def _get_gae_credentials():
+ """Gets Google App Engine App Identity credentials and project ID."""
+ # While this library is normally bundled with app_engine, there are
+ # some cases where it's not available, so we tolerate ImportError.
+
+ return _default._get_gae_credentials()
+
+
+def _get_gce_credentials(request=None):
+ """Gets credentials and project ID from the GCE Metadata Service."""
+ # Ping requires a transport, but we want application default credentials
+ # to require no arguments. So, we'll use the _http_client transport which
+ # uses http.client. This is only acceptable because the metadata server
+ # doesn't do SSL and never requires proxies.
+
+ # While this library is normally bundled with compute_engine, there are
+ # some cases where it's not available, so we tolerate ImportError.
+
+ return _default._get_gce_credentials(request)
+
+
+def default_async(scopes=None, request=None, quota_project_id=None):
+ """Gets the default credentials for the current environment.
+
+ `Application Default Credentials`_ provides an easy way to obtain
+ credentials to call Google APIs for server-to-server or local applications.
+ This function acquires credentials from the environment in the following
+ order:
+
+ 1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
+ to the path of a valid service account JSON private key file, then it is
+ loaded and returned. The project ID returned is the project ID defined
+ in the service account file if available (some older files do not
+ contain project ID information).
+ 2. If the `Google Cloud SDK`_ is installed and has application default
+ credentials set they are loaded and returned.
+
+ To enable application default credentials with the Cloud SDK run::
+
+ gcloud auth application-default login
+
+ If the Cloud SDK has an active project, the project ID is returned. The
+ active project can be set using::
+
+ gcloud config set project
+
+ 3. If the application is running in the `App Engine standard environment`_
+ (first generation) then the credentials and project ID from the
+ `App Identity Service`_ are used.
+ 4. If the application is running in `Compute Engine`_ or `Cloud Run`_ or
+ the `App Engine flexible environment`_ or the `App Engine standard
+ environment`_ (second generation) then the credentials and project ID
+ are obtained from the `Metadata Service`_.
+ 5. If no credentials are found,
+ :class:`~google.auth.exceptions.DefaultCredentialsError` will be raised.
+
+ .. _Application Default Credentials: https://developers.google.com\
+ /identity/protocols/application-default-credentials
+ .. _Google Cloud SDK: https://cloud.google.com/sdk
+ .. _App Engine standard environment: https://cloud.google.com/appengine
+ .. _App Identity Service: https://cloud.google.com/appengine/docs/python\
+ /appidentity/
+ .. _Compute Engine: https://cloud.google.com/compute
+ .. _App Engine flexible environment: https://cloud.google.com\
+ /appengine/flexible
+ .. _Metadata Service: https://cloud.google.com/compute/docs\
+ /storing-retrieving-metadata
+ .. _Cloud Run: https://cloud.google.com/run
+
+ Example::
+
+        from google.auth import _default_async
+
+        credentials, project_id = _default_async.default_async()
+
+ Args:
+ scopes (Sequence[str]): The list of scopes for the credentials. If
+ specified, the credentials will automatically be scoped if
+ necessary.
+ request (google.auth.transport.Request): An object used to make
+ HTTP requests. This is used to detect whether the application
+ is running on Compute Engine. If not specified, then it will
+ use the standard library http client to make requests.
+ quota_project_id (Optional[str]): The project ID used for
+ quota and billing.
+ Returns:
+ Tuple[~google.auth.credentials.Credentials, Optional[str]]:
+ the current environment's credentials and project ID. Project ID
+ may be None, which indicates that the Project ID could not be
+ ascertained from the environment.
+
+ Raises:
+ ~google.auth.exceptions.DefaultCredentialsError:
+ If no credentials were found, or if the credentials found were
+ invalid.
+ """
+ from google.auth._credentials_async import with_scopes_if_required
+ from google.auth.credentials import CredentialsWithQuotaProject
+
+ explicit_project_id = os.environ.get(
+ environment_vars.PROJECT, os.environ.get(environment_vars.LEGACY_PROJECT)
+ )
+
+ checkers = (
+ lambda: _get_explicit_environ_credentials(quota_project_id=quota_project_id),
+ lambda: _get_gcloud_sdk_credentials(quota_project_id=quota_project_id),
+ _get_gae_credentials,
+ lambda: _get_gce_credentials(request),
+ )
+
+ for checker in checkers:
+ credentials, project_id = checker()
+ if credentials is not None:
+ credentials = with_scopes_if_required(credentials, scopes)
+ if quota_project_id and isinstance(
+ credentials, CredentialsWithQuotaProject
+ ):
+ credentials = credentials.with_quota_project(quota_project_id)
+ effective_project_id = explicit_project_id or project_id
+ if not effective_project_id:
+ _default._LOGGER.warning(
+ "No project ID could be determined. Consider running "
+ "`gcloud config set project` or setting the %s "
+ "environment variable",
+ environment_vars.PROJECT,
+ )
+ return credentials, effective_project_id
+
+ raise exceptions.DefaultCredentialsError(_default._CLOUD_SDK_MISSING_CREDENTIALS)
diff --git a/Lib/site-packages/google/auth/_exponential_backoff.py b/Lib/site-packages/google/auth/_exponential_backoff.py
new file mode 100644
index 0000000..0dd621a
--- /dev/null
+++ b/Lib/site-packages/google/auth/_exponential_backoff.py
@@ -0,0 +1,109 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Exponential Backoff Utility
+
+This is a private module that implements the exponential backoff algorithm.
+It can be used as a utility for code that needs to retry on failure, for
+example an HTTP request.
+"""
+
+import random
+import time
+
+# The default number of retry attempts.
+_DEFAULT_RETRY_TOTAL_ATTEMPTS = 3
+
+# The default initial backoff period (1.0 second).
+_DEFAULT_INITIAL_INTERVAL_SECONDS = 1.0
+
+# The default randomization factor (0.1, which results in a random period
+# ranging between 10% below and 10% above the retry interval).
+_DEFAULT_RANDOMIZATION_FACTOR = 0.1
+
+# The default multiplier value (2, which is a 100% increase per backoff).
+_DEFAULT_MULTIPLIER = 2.0
+
+
+class ExponentialBackoff:
+ """An exponential backoff iterator. This can be used in a for loop to
+ perform requests with exponential backoff.
+
+    Args:
+        total_attempts (Optional[int]):
+            The maximum number of retry attempts that should happen.
+            The default value is 3 attempts.
+        initial_wait_seconds (Optional[int]):
+            The amount of time to sleep in the first backoff, in seconds.
+            The default value is 1 second.
+        randomization_factor (Optional[float]):
+            The amount of jitter to apply to each backoff. For example, a
+            value of 0.1 introduces a jitter range of 10% around the current
+            backoff period.
+            The default value is 0.1.
+        multiplier (Optional[float]):
+            The backoff multiplier. This adjusts how much each backoff
+            increases. For example, a value of 2.0 doubles the wait on each
+            attempt; with an initial wait of 1.0 the sequence is
+            [1.0, 2.0, 4.0, 8.0].
+            The default value is 2.0.
+    """
+
+ def __init__(
+ self,
+ total_attempts=_DEFAULT_RETRY_TOTAL_ATTEMPTS,
+ initial_wait_seconds=_DEFAULT_INITIAL_INTERVAL_SECONDS,
+ randomization_factor=_DEFAULT_RANDOMIZATION_FACTOR,
+ multiplier=_DEFAULT_MULTIPLIER,
+ ):
+ self._total_attempts = total_attempts
+ self._initial_wait_seconds = initial_wait_seconds
+
+ self._current_wait_in_seconds = self._initial_wait_seconds
+
+ self._randomization_factor = randomization_factor
+ self._multiplier = multiplier
+ self._backoff_count = 0
+
+ def __iter__(self):
+ self._backoff_count = 0
+ self._current_wait_in_seconds = self._initial_wait_seconds
+ return self
+
+ def __next__(self):
+ if self._backoff_count >= self._total_attempts:
+ raise StopIteration
+ self._backoff_count += 1
+
+ jitter_variance = self._current_wait_in_seconds * self._randomization_factor
+ jitter = random.uniform(
+ self._current_wait_in_seconds - jitter_variance,
+ self._current_wait_in_seconds + jitter_variance,
+ )
+
+ time.sleep(jitter)
+
+ self._current_wait_in_seconds *= self._multiplier
+ return self._backoff_count
+
+ @property
+ def total_attempts(self):
+ """The total amount of backoff attempts that will be made."""
+ return self._total_attempts
+
+ @property
+ def backoff_count(self):
+ """The current amount of backoff attempts that have been made."""
+ return self._backoff_count
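+
+
+# Usage sketch (illustrative; `do_request` is a hypothetical callable that
+# raises ConnectionError on transient failure). Note that each iteration
+# sleeps with jitter before yielding the attempt number:
+#
+#   backoff = ExponentialBackoff(total_attempts=3)
+#   for attempt in backoff:
+#       try:
+#           do_request()
+#           break
+#       except ConnectionError:
+#           continue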
diff --git a/Lib/site-packages/google/auth/_helpers.py b/Lib/site-packages/google/auth/_helpers.py
new file mode 100644
index 0000000..a6c07f7
--- /dev/null
+++ b/Lib/site-packages/google/auth/_helpers.py
@@ -0,0 +1,273 @@
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper functions for commonly used utilities."""
+
+import base64
+import calendar
+import datetime
+from email.message import Message
+import sys
+import urllib
+
+from google.auth import exceptions
+
+# The smallest MDS cache used by this library stores tokens until 4 minutes from
+# expiry.
+REFRESH_THRESHOLD = datetime.timedelta(minutes=3, seconds=45)
+
+
+def copy_docstring(source_class):
+ """Decorator that copies a method's docstring from another class.
+
+ Args:
+ source_class (type): The class that has the documented method.
+
+ Returns:
+ Callable: A decorator that will copy the docstring of the same
+ named method in the source class to the decorated method.
+ """
+
+ def decorator(method):
+ """Decorator implementation.
+
+ Args:
+ method (Callable): The method to copy the docstring to.
+
+ Returns:
+ Callable: the same method passed in with an updated docstring.
+
+ Raises:
+ google.auth.exceptions.InvalidOperation: if the method already has a docstring.
+ """
+ if method.__doc__:
+ raise exceptions.InvalidOperation("Method already has a docstring.")
+
+ source_method = getattr(source_class, method.__name__)
+ method.__doc__ = source_method.__doc__
+
+ return method
+
+ return decorator
+
+
+def parse_content_type(header_value):
+ """Parse a 'content-type' header value to get just the plain media-type (without parameters).
+
+    This is done using the Message class from email.message, as suggested in
+    PEP 594 (the cgi module is deprecated and will be removed in Python 3.13;
+    see https://peps.python.org/pep-0594/#cgi).
+
+ Args:
+ header_value (str): The value of a 'content-type' header as a string.
+
+ Returns:
+ str: A string with just the lowercase media-type from the parsed 'content-type' header.
+ If the provided content-type is not parsable, returns 'text/plain',
+ the default value for textual files.
+ """
+ m = Message()
+ m["content-type"] = header_value
+ return (
+ m.get_content_type()
+ ) # Despite the name, actually returns just the media-type
+
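+# e.g. (a sketch): parse_content_type("application/json; charset=utf-8")
+# returns "application/json"; an unparsable value falls back to "text/plain".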
+
+def utcnow():
+ """Returns the current UTC datetime.
+
+ Returns:
+ datetime: The current time in UTC.
+ """
+    # datetime.utcnow() was used previously, but it is deprecated as of
+    # Python 3.12, so datetime.now(timezone.utc) is used instead. "utcnow()"
+    # is offset-naive (no timezone info), whereas "now()" is offset-aware
+    # (with timezone info), which would cause datetime comparison problems.
+    # For backward compatibility, the timezone info is removed.
+ now = datetime.datetime.now(datetime.timezone.utc)
+ now = now.replace(tzinfo=None)
+ return now
+
+
+def datetime_to_secs(value):
+ """Convert a datetime object to the number of seconds since the UNIX epoch.
+
+ Args:
+ value (datetime): The datetime to convert.
+
+ Returns:
+ int: The number of seconds since the UNIX epoch.
+ """
+ return calendar.timegm(value.utctimetuple())
+
+
+def to_bytes(value, encoding="utf-8"):
+ """Converts a string value to bytes, if necessary.
+
+ Args:
+ value (Union[str, bytes]): The value to be converted.
+ encoding (str): The encoding to use to convert unicode to bytes.
+ Defaults to "utf-8".
+
+ Returns:
+ bytes: The original value converted to bytes (if unicode) or as
+ passed in if it started out as bytes.
+
+ Raises:
+ google.auth.exceptions.InvalidValue: If the value could not be converted to bytes.
+ """
+ result = value.encode(encoding) if isinstance(value, str) else value
+ if isinstance(result, bytes):
+ return result
+ else:
+ raise exceptions.InvalidValue(
+ "{0!r} could not be converted to bytes".format(value)
+ )
+
+
+def from_bytes(value):
+ """Converts bytes to a string value, if necessary.
+
+ Args:
+ value (Union[str, bytes]): The value to be converted.
+
+ Returns:
+ str: The original value converted to unicode (if bytes) or as passed in
+ if it started out as unicode.
+
+ Raises:
+ google.auth.exceptions.InvalidValue: If the value could not be converted to unicode.
+ """
+ result = value.decode("utf-8") if isinstance(value, bytes) else value
+ if isinstance(result, str):
+ return result
+ else:
+ raise exceptions.InvalidValue(
+ "{0!r} could not be converted to unicode".format(value)
+ )
+
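+# e.g. (a sketch): to_bytes("abc") == b"abc" and from_bytes(b"abc") == "abc";
+# values already in the target type are returned unchanged.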
+
+def update_query(url, params, remove=None):
+ """Updates a URL's query parameters.
+
+ Replaces any current values if they are already present in the URL.
+
+ Args:
+ url (str): The URL to update.
+ params (Mapping[str, str]): A mapping of query parameter
+ keys to values.
+ remove (Sequence[str]): Parameters to remove from the query string.
+
+ Returns:
+ str: The URL with updated query parameters.
+
+ Examples:
+
+ >>> url = 'http://example.com?a=1'
+ >>> update_query(url, {'a': '2'})
+ http://example.com?a=2
+ >>> update_query(url, {'b': '3'})
+ http://example.com?a=1&b=3
+ >> update_query(url, {'b': '3'}, remove=['a'])
+ http://example.com?b=3
+
+ """
+ if remove is None:
+ remove = []
+
+ # Split the URL into parts.
+ parts = urllib.parse.urlparse(url)
+ # Parse the query string.
+ query_params = urllib.parse.parse_qs(parts.query)
+ # Update the query parameters with the new parameters.
+ query_params.update(params)
+ # Remove any values specified in remove.
+ query_params = {
+ key: value for key, value in query_params.items() if key not in remove
+ }
+    # Re-encode the query string.
+    new_query = urllib.parse.urlencode(query_params, doseq=True)
+    # Unsplit the URL.
+ new_parts = parts._replace(query=new_query)
+ return urllib.parse.urlunparse(new_parts)
+
+
+def scopes_to_string(scopes):
+ """Converts scope value to a string suitable for sending to OAuth 2.0
+ authorization servers.
+
+ Args:
+ scopes (Sequence[str]): The sequence of scopes to convert.
+
+ Returns:
+ str: The scopes formatted as a single string.
+ """
+ return " ".join(scopes)
+
+
+def string_to_scopes(scopes):
+ """Converts stringifed scopes value to a list.
+
+ Args:
+ scopes (Union[Sequence, str]): The string of space-separated scopes
+ to convert.
+ Returns:
+        Sequence[str]: The separated scopes.
+ """
+ if not scopes:
+ return []
+
+ return scopes.split(" ")
+
+
+def padded_urlsafe_b64decode(value):
+ """Decodes base64 strings lacking padding characters.
+
+ Google infrastructure tends to omit the base64 padding characters.
+
+ Args:
+ value (Union[str, bytes]): The encoded value.
+
+ Returns:
+ bytes: The decoded value
+ """
+ b64string = to_bytes(value)
+ padded = b64string + b"=" * (-len(b64string) % 4)
+ return base64.urlsafe_b64decode(padded)
+
+
+def unpadded_urlsafe_b64encode(value):
+ """Encodes base64 strings removing any padding characters.
+
+    `rfc 7515`_ defines Base64url to NOT include any padding
+    characters, but the stdlib doesn't do that by default.
+
+    .. _rfc 7515: https://tools.ietf.org/html/rfc7515#page-6
+
+ Args:
+        value (Union[str, bytes]): The bytes-like value to encode.
+
+    Returns:
+        bytes: The encoded value.
+ """
+ return base64.urlsafe_b64encode(value).rstrip(b"=")
+
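+# Round-trip sketch (illustrative): unpadded_urlsafe_b64encode(b"\xfb\xef")
+# yields b"--8" with the "=" padding stripped, and
+# padded_urlsafe_b64decode(b"--8") restores b"\xfb\xef".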
+
+def is_python_3():
+ """Check if the Python interpreter is Python 2 or 3.
+
+ Returns:
+ bool: True if the Python interpreter is Python 3 and False otherwise.
+ """
+ return sys.version_info > (3, 0)
diff --git a/Lib/site-packages/google/auth/_jwt_async.py b/Lib/site-packages/google/auth/_jwt_async.py
new file mode 100644
index 0000000..3a1abc5
--- /dev/null
+++ b/Lib/site-packages/google/auth/_jwt_async.py
@@ -0,0 +1,164 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""JSON Web Tokens
+
+Provides support for creating (encoding) and verifying (decoding) JWTs,
+especially JWTs generated and consumed by Google infrastructure.
+
+See `rfc7519`_ for more details on JWTs.
+
+To encode a JWT use :func:`encode`::
+
+ from google.auth import crypt
+ from google.auth import jwt_async
+
+ signer = crypt.Signer(private_key)
+ payload = {'some': 'payload'}
+ encoded = jwt_async.encode(signer, payload)
+
+To decode a JWT and verify claims use :func:`decode`::
+
+ claims = jwt_async.decode(encoded, certs=public_certs)
+
+You can also skip verification::
+
+ claims = jwt_async.decode(encoded, verify=False)
+
+.. _rfc7519: https://tools.ietf.org/html/rfc7519
+
+
+NOTE: This async support is experimental and marked internal. This surface may
+change in minor releases.
+"""
+
+from google.auth import _credentials_async
+from google.auth import jwt
+
+
+def encode(signer, payload, header=None, key_id=None):
+ """Make a signed JWT.
+
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign the JWT.
+ payload (Mapping[str, str]): The JWT payload.
+ header (Mapping[str, str]): Additional JWT header payload.
+ key_id (str): The key id to add to the JWT header. If the
+ signer has a key id it will be used as the default. If this is
+ specified it will override the signer's key id.
+
+ Returns:
+ bytes: The encoded JWT.
+ """
+ return jwt.encode(signer, payload, header, key_id)
+
+
+def decode(token, certs=None, verify=True, audience=None):
+ """Decode and verify a JWT.
+
+ Args:
+ token (str): The encoded JWT.
+ certs (Union[str, bytes, Mapping[str, Union[str, bytes]]]): The
+ certificate used to validate the JWT signature. If bytes or string,
+            it must be the public key certificate in PEM format. If a mapping,
+ it must be a mapping of key IDs to public key certificates in PEM
+ format. The mapping must contain the same key ID that's specified
+ in the token's header.
+ verify (bool): Whether to perform signature and claim validation.
+ Verification is done by default.
+ audience (str): The audience claim, 'aud', that this JWT should
+ contain. If None then the JWT's 'aud' parameter is not verified.
+
+ Returns:
+ Mapping[str, str]: The deserialized JSON payload in the JWT.
+
+ Raises:
+ ValueError: if any verification checks failed.
+ """
+
+ return jwt.decode(token, certs, verify, audience)
+
+
+class Credentials(
+ jwt.Credentials, _credentials_async.Signing, _credentials_async.Credentials
+):
+ """Credentials that use a JWT as the bearer token.
+
+ These credentials require an "audience" claim. This claim identifies the
+ intended recipient of the bearer token.
+
+ The constructor arguments determine the claims for the JWT that is
+ sent with requests. Usually, you'll construct these credentials with
+ one of the helper constructors as shown in the next section.
+
+ To create JWT credentials using a Google service account private key
+ JSON file::
+
+ audience = 'https://pubsub.googleapis.com/google.pubsub.v1.Publisher'
+ credentials = jwt_async.Credentials.from_service_account_file(
+ 'service-account.json',
+ audience=audience)
+
+ If you already have the service account file loaded and parsed::
+
+ service_account_info = json.load(open('service_account.json'))
+ credentials = jwt_async.Credentials.from_service_account_info(
+ service_account_info,
+ audience=audience)
+
+ Both helper methods pass on arguments to the constructor, so you can
+ specify the JWT claims::
+
+ credentials = jwt_async.Credentials.from_service_account_file(
+ 'service-account.json',
+ audience=audience,
+ additional_claims={'meta': 'data'})
+
+ You can also construct the credentials directly if you have a
+ :class:`~google.auth.crypt.Signer` instance::
+
+ credentials = jwt_async.Credentials(
+ signer,
+ issuer='your-issuer',
+ subject='your-subject',
+ audience=audience)
+
+ The claims are considered immutable. If you want to modify the claims,
+ you can easily create another instance using :meth:`with_claims`::
+
+ new_audience = (
+ 'https://pubsub.googleapis.com/google.pubsub.v1.Subscriber')
+ new_credentials = credentials.with_claims(audience=new_audience)
+ """
+
+
+class OnDemandCredentials(
+ jwt.OnDemandCredentials, _credentials_async.Signing, _credentials_async.Credentials
+):
+ """On-demand JWT credentials.
+
+ Like :class:`Credentials`, this class uses a JWT as the bearer token for
+ authentication. However, this class does not require the audience at
+ construction time. Instead, it will generate a new token on-demand for
+ each request using the request URI as the audience. It caches tokens
+ so that multiple requests to the same URI do not incur the overhead
+ of generating a new token every time.
+
+ This behavior is especially useful for `gRPC`_ clients. A gRPC service may
+    have multiple audiences, and gRPC clients may not know all of the audiences
+ required for accessing a particular service. With these credentials,
+ no knowledge of the audiences is required ahead of time.
+
+ .. _grpc: http://www.grpc.io/
+ """
diff --git a/Lib/site-packages/google/auth/_oauth2client.py b/Lib/site-packages/google/auth/_oauth2client.py
new file mode 100644
index 0000000..8b83ff2
--- /dev/null
+++ b/Lib/site-packages/google/auth/_oauth2client.py
@@ -0,0 +1,167 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for transitioning from oauth2client to google-auth.
+
+.. warning::
+ This module is private as it is intended to assist first-party downstream
+ clients with the transition from oauth2client to google-auth.
+"""
+
+from __future__ import absolute_import
+
+from google.auth import _helpers
+import google.auth.app_engine
+import google.auth.compute_engine
+import google.oauth2.credentials
+import google.oauth2.service_account
+
+try:
+ import oauth2client.client # type: ignore
+ import oauth2client.contrib.gce # type: ignore
+ import oauth2client.service_account # type: ignore
+except ImportError as caught_exc:
+ raise ImportError("oauth2client is not installed.") from caught_exc
+
+try:
+ import oauth2client.contrib.appengine # type: ignore
+
+ _HAS_APPENGINE = True
+except ImportError:
+ _HAS_APPENGINE = False
+
+
+_CONVERT_ERROR_TMPL = "Unable to convert {} to a google-auth credentials class."
+
+
+def _convert_oauth2_credentials(credentials):
+ """Converts to :class:`google.oauth2.credentials.Credentials`.
+
+ Args:
+ credentials (Union[oauth2client.client.OAuth2Credentials,
+ oauth2client.client.GoogleCredentials]): The credentials to
+ convert.
+
+ Returns:
+ google.oauth2.credentials.Credentials: The converted credentials.
+ """
+ new_credentials = google.oauth2.credentials.Credentials(
+ token=credentials.access_token,
+ refresh_token=credentials.refresh_token,
+ token_uri=credentials.token_uri,
+ client_id=credentials.client_id,
+ client_secret=credentials.client_secret,
+ scopes=credentials.scopes,
+ )
+
+ new_credentials._expires = credentials.token_expiry
+
+ return new_credentials
+
+
+def _convert_service_account_credentials(credentials):
+ """Converts to :class:`google.oauth2.service_account.Credentials`.
+
+ Args:
+ credentials (Union[
+ oauth2client.service_account.ServiceAccountCredentials,
+ oauth2client.service_account._JWTAccessCredentials]): The
+ credentials to convert.
+
+ Returns:
+ google.oauth2.service_account.Credentials: The converted credentials.
+ """
+ info = credentials.serialization_data.copy()
+ info["token_uri"] = credentials.token_uri
+ return google.oauth2.service_account.Credentials.from_service_account_info(info)
+
+
+def _convert_gce_app_assertion_credentials(credentials):
+ """Converts to :class:`google.auth.compute_engine.Credentials`.
+
+ Args:
+ credentials (oauth2client.contrib.gce.AppAssertionCredentials): The
+ credentials to convert.
+
+    Returns:
+        google.auth.compute_engine.Credentials: The converted credentials.
+ """
+ return google.auth.compute_engine.Credentials(
+ service_account_email=credentials.service_account_email
+ )
+
+
+def _convert_appengine_app_assertion_credentials(credentials):
+ """Converts to :class:`google.auth.app_engine.Credentials`.
+
+ Args:
+ credentials (oauth2client.contrib.app_engine.AppAssertionCredentials):
+ The credentials to convert.
+
+ Returns:
+        google.auth.app_engine.Credentials: The converted credentials.
+ """
+ # pylint: disable=invalid-name
+ return google.auth.app_engine.Credentials(
+ scopes=_helpers.string_to_scopes(credentials.scope),
+ service_account_id=credentials.service_account_id,
+ )
+
+
+_CLASS_CONVERSION_MAP = {
+ oauth2client.client.OAuth2Credentials: _convert_oauth2_credentials,
+ oauth2client.client.GoogleCredentials: _convert_oauth2_credentials,
+ oauth2client.service_account.ServiceAccountCredentials: _convert_service_account_credentials,
+ oauth2client.service_account._JWTAccessCredentials: _convert_service_account_credentials,
+ oauth2client.contrib.gce.AppAssertionCredentials: _convert_gce_app_assertion_credentials,
+}
+
+if _HAS_APPENGINE:
+ _CLASS_CONVERSION_MAP[
+ oauth2client.contrib.appengine.AppAssertionCredentials
+ ] = _convert_appengine_app_assertion_credentials
+
+
+def convert(credentials):
+ """Convert oauth2client credentials to google-auth credentials.
+
+    This function converts:
+
+ - :class:`oauth2client.client.OAuth2Credentials` to
+ :class:`google.oauth2.credentials.Credentials`.
+ - :class:`oauth2client.client.GoogleCredentials` to
+ :class:`google.oauth2.credentials.Credentials`.
+ - :class:`oauth2client.service_account.ServiceAccountCredentials` to
+ :class:`google.oauth2.service_account.Credentials`.
+ - :class:`oauth2client.service_account._JWTAccessCredentials` to
+ :class:`google.oauth2.service_account.Credentials`.
+ - :class:`oauth2client.contrib.gce.AppAssertionCredentials` to
+ :class:`google.auth.compute_engine.Credentials`.
+ - :class:`oauth2client.contrib.appengine.AppAssertionCredentials` to
+ :class:`google.auth.app_engine.Credentials`.
+
+ Returns:
+ google.auth.credentials.Credentials: The converted credentials.
+
+ Raises:
+ ValueError: If the credentials could not be converted.
+ """
+
+ credentials_class = type(credentials)
+
+ try:
+ return _CLASS_CONVERSION_MAP[credentials_class](credentials)
+ except KeyError as caught_exc:
+ new_exc = ValueError(_CONVERT_ERROR_TMPL.format(credentials_class))
+ raise new_exc from caught_exc
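+
+
+# Illustrative usage (a sketch; assumes oauth2client is installed and has
+# application default credentials available):
+#
+#   import oauth2client.client
+#
+#   old_credentials = oauth2client.client.GoogleCredentials.get_application_default()
+#   new_credentials = convert(old_credentials)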
diff --git a/Lib/site-packages/google/auth/_refresh_worker.py b/Lib/site-packages/google/auth/_refresh_worker.py
new file mode 100644
index 0000000..9bb0ccc
--- /dev/null
+++ b/Lib/site-packages/google/auth/_refresh_worker.py
@@ -0,0 +1,109 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import logging
+import threading
+
+import google.auth.exceptions as e
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class RefreshThreadManager:
+ """
+    Organizes exactly one background job that refreshes a token.
+ """
+
+ def __init__(self):
+ """Initializes the manager."""
+
+ self._worker = None
+ self._lock = threading.Lock() # protects access to worker threads.
+
+ def start_refresh(self, cred, request):
+ """Starts a refresh thread for the given credentials.
+ The credentials are refreshed using the request parameter.
+ request and cred MUST not be None
+
+ Returns True if a background refresh was kicked off. False otherwise.
+
+ Args:
+ cred: A credentials object.
+ request: A request object.
+ Returns:
+ bool
+ """
+ if cred is None or request is None:
+ raise e.InvalidValue(
+ "Unable to start refresh. cred and request must be valid and instantiated objects."
+ )
+
+ with self._lock:
+ if self._worker is not None and self._worker._error_info is not None:
+ return False
+
+ if self._worker is None or not self._worker.is_alive(): # pragma: NO COVER
+ self._worker = RefreshThread(cred=cred, request=copy.deepcopy(request))
+ self._worker.start()
+ return True
+
+ def clear_error(self):
+ """
+ Removes any errors that were stored from previous background refreshes.
+ """
+ with self._lock:
+ if self._worker:
+ self._worker._error_info = None
+
+ def __getstate__(self):
+ """Pickle helper that serializes the _lock attribute."""
+ state = self.__dict__.copy()
+ state["_lock"] = None
+ return state
+
+ def __setstate__(self, state):
+ """Pickle helper that deserializes the _lock attribute."""
+ state["_key"] = threading.Lock()
+ self.__dict__.update(state)
+
+
+class RefreshThread(threading.Thread):
+ """
+ Thread that refreshes credentials.
+ """
+
+ def __init__(self, cred, request, **kwargs):
+ """Initializes the thread.
+
+ Args:
+ cred: A Credential object to refresh.
+ request: A Request object used to perform a credential refresh.
+ **kwargs: Additional keyword arguments.
+ """
+
+ super().__init__(**kwargs)
+ self._cred = cred
+ self._request = request
+ self._error_info = None
+
+ def run(self):
+ """
+ Perform the credential refresh.
+ """
+ try:
+ self._cred.refresh(self._request)
+ except Exception as err: # pragma: NO COVER
+ _LOGGER.error(f"Background refresh failed due to: {err}")
+ self._error_info = err
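+
+
+# Usage sketch (illustrative; `cred` and `request` are assumed to be valid,
+# already-constructed credentials and transport request objects):
+#
+#   manager = RefreshThreadManager()
+#   kicked_off = manager.start_refresh(cred, request)  # returns bool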
diff --git a/Lib/site-packages/google/auth/_service_account_info.py b/Lib/site-packages/google/auth/_service_account_info.py
new file mode 100644
index 0000000..6b64adc
--- /dev/null
+++ b/Lib/site-packages/google/auth/_service_account_info.py
@@ -0,0 +1,80 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper functions for loading data from a Google service account file."""
+
+import io
+import json
+
+from google.auth import crypt
+from google.auth import exceptions
+
+
+def from_dict(data, require=None, use_rsa_signer=True):
+ """Validates a dictionary containing Google service account data.
+
+ Creates and returns a :class:`google.auth.crypt.Signer` instance from the
+ private key specified in the data.
+
+ Args:
+ data (Mapping[str, str]): The service account data
+ require (Sequence[str]): List of keys required to be present in the
+ info.
+ use_rsa_signer (Optional[bool]): Whether to use RSA signer or EC signer.
+ We use RSA signer by default.
+
+ Returns:
+ google.auth.crypt.Signer: A signer created from the private key in the
+ service account file.
+
+    Raises:
+        google.auth.exceptions.MalformedError: if the data was in the wrong
+            format, or if one of the required keys is missing.
+ """
+ keys_needed = set(require if require is not None else [])
+
+ missing = keys_needed.difference(data.keys())
+
+ if missing:
+ raise exceptions.MalformedError(
+ "Service account info was not in the expected format, missing "
+ "fields {}.".format(", ".join(missing))
+ )
+
+ # Create a signer.
+ if use_rsa_signer:
+ signer = crypt.RSASigner.from_service_account_info(data)
+ else:
+ signer = crypt.ES256Signer.from_service_account_info(data)
+
+ return signer
+
+
+def from_filename(filename, require=None, use_rsa_signer=True):
+ """Reads a Google service account JSON file and returns its parsed info.
+
+ Args:
+ filename (str): The path to the service account .json file.
+ require (Sequence[str]): List of keys required to be present in the
+ info.
+ use_rsa_signer (Optional[bool]): Whether to use RSA signer or EC signer.
+ We use RSA signer by default.
+
+ Returns:
+ Tuple[ Mapping[str, str], google.auth.crypt.Signer ]: The verified
+ info and a signer instance.
+ """
+ with io.open(filename, "r", encoding="utf-8") as json_file:
+ data = json.load(json_file)
+ return data, from_dict(data, require=require, use_rsa_signer=use_rsa_signer)
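+
+
+# Illustrative usage (a sketch; "key.json" is a hypothetical path):
+#
+#   info, signer = from_filename(
+#       "key.json", require=["client_email", "token_uri"]
+#   )
+#   email = info["client_email"]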
diff --git a/Lib/site-packages/google/auth/api_key.py b/Lib/site-packages/google/auth/api_key.py
new file mode 100644
index 0000000..4fdf7f2
--- /dev/null
+++ b/Lib/site-packages/google/auth/api_key.py
@@ -0,0 +1,76 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google API key support.
+This module provides authentication using the `API key`_.
+.. _API key:
+ https://cloud.google.com/docs/authentication/api-keys/
+"""
+
+from google.auth import _helpers
+from google.auth import credentials
+from google.auth import exceptions
+
+
+class Credentials(credentials.Credentials):
+ """API key credentials.
+ These credentials use API key to provide authorization to applications.
+ """
+
+ def __init__(self, token):
+ """
+ Args:
+ token (str): API key string
+        Raises:
+            google.auth.exceptions.InvalidValue: If the provided API key is
+                not a non-empty string.
+ """
+ super(Credentials, self).__init__()
+ if not token:
+ raise exceptions.InvalidValue("Token must be a non-empty API key string")
+ self.token = token
+
+ @property
+ def expired(self):
+ return False
+
+ @property
+ def valid(self):
+ return True
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ return
+
+ def apply(self, headers, token=None):
+ """Apply the API key token to the x-goog-api-key header.
+ Args:
+ headers (Mapping): The HTTP request headers.
+ token (Optional[str]): If specified, overrides the current access
+ token.
+ """
+ headers["x-goog-api-key"] = token or self.token
+
+ def before_request(self, request, method, url, headers):
+ """Performs credential-specific before request logic.
+ Refreshes the credentials if necessary, then calls :meth:`apply` to
+ apply the token to the x-goog-api-key header.
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ method (str): The request's HTTP method or the RPC method being
+ invoked.
+ url (str): The request's URI or the RPC service's URI.
+ headers (Mapping): The request's headers.
+ """
+ self.apply(headers)
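+
+
+# Usage sketch (illustrative; "my-api-key" is a placeholder):
+#
+#   credentials = Credentials("my-api-key")
+#   headers = {}
+#   credentials.before_request(None, "GET", "https://example.googleapis.com", headers)
+#   assert headers["x-goog-api-key"] == "my-api-key"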
diff --git a/Lib/site-packages/google/auth/app_engine.py b/Lib/site-packages/google/auth/app_engine.py
new file mode 100644
index 0000000..7083ee6
--- /dev/null
+++ b/Lib/site-packages/google/auth/app_engine.py
@@ -0,0 +1,180 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google App Engine standard environment support.
+
+This module provides authentication and signing for applications running on App
+Engine in the standard environment using the `App Identity API`_.
+
+
+.. _App Identity API:
+ https://cloud.google.com/appengine/docs/python/appidentity/
+"""
+
+import datetime
+
+from google.auth import _helpers
+from google.auth import credentials
+from google.auth import crypt
+from google.auth import exceptions
+
+# pytype: disable=import-error
+try:
+ from google.appengine.api import app_identity # type: ignore
+except ImportError:
+ app_identity = None # type: ignore
+# pytype: enable=import-error
+
+
+class Signer(crypt.Signer):
+ """Signs messages using the App Engine App Identity service.
+
+ This can be used in place of :class:`google.auth.crypt.Signer` when
+ running in the App Engine standard environment.
+ """
+
+ @property
+ def key_id(self):
+ """Optional[str]: The key ID used to identify this private key.
+
+ .. warning::
+ This is always ``None``. The key ID used by App Engine can not
+ be reliably determined ahead of time.
+ """
+ return None
+
+ @_helpers.copy_docstring(crypt.Signer)
+ def sign(self, message):
+ message = _helpers.to_bytes(message)
+ _, signature = app_identity.sign_blob(message)
+ return signature
+
+
+def get_project_id():
+ """Gets the project ID for the current App Engine application.
+
+ Returns:
+ str: The project ID
+
+ Raises:
+ google.auth.exceptions.OSError: If the App Engine APIs are unavailable.
+ """
+ # pylint: disable=missing-raises-doc
+ # Pylint rightfully thinks google.auth.exceptions.OSError is OSError, but doesn't
+ # realize it's a valid alias.
+ if app_identity is None:
+ raise exceptions.OSError("The App Engine APIs are not available.")
+ return app_identity.get_application_id()
+
+
+class Credentials(
+ credentials.Scoped, credentials.Signing, credentials.CredentialsWithQuotaProject
+):
+ """App Engine standard environment credentials.
+
+ These credentials use the App Engine App Identity API to obtain access
+ tokens.
+ """
+
+ def __init__(
+ self,
+ scopes=None,
+ default_scopes=None,
+ service_account_id=None,
+ quota_project_id=None,
+ ):
+ """
+ Args:
+ scopes (Sequence[str]): Scopes to request from the App Identity
+ API.
+ default_scopes (Sequence[str]): Default scopes passed by a
+ Google client library. Use 'scopes' for user-defined scopes.
+ service_account_id (str): The service account ID passed into
+ :func:`google.appengine.api.app_identity.get_access_token`.
+ If not specified, the default application service account
+ ID will be used.
+ quota_project_id (Optional[str]): The project ID used for quota
+ and billing.
+
+ Raises:
+ google.auth.exceptions.OSError: If the App Engine APIs are unavailable.
+ """
+ # pylint: disable=missing-raises-doc
+ # Pylint rightfully thinks google.auth.exceptions.OSError is OSError, but doesn't
+ # realize it's a valid alias.
+ if app_identity is None:
+ raise exceptions.OSError("The App Engine APIs are not available.")
+
+ super(Credentials, self).__init__()
+ self._scopes = scopes
+ self._default_scopes = default_scopes
+ self._service_account_id = service_account_id
+ self._signer = Signer()
+ self._quota_project_id = quota_project_id
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ scopes = self._scopes if self._scopes is not None else self._default_scopes
+ # pylint: disable=unused-argument
+ token, ttl = app_identity.get_access_token(scopes, self._service_account_id)
+ expiry = datetime.datetime.utcfromtimestamp(ttl)
+
+ self.token, self.expiry = token, expiry
+
+ @property
+ def service_account_email(self):
+ """The service account email."""
+ if self._service_account_id is None:
+ self._service_account_id = app_identity.get_service_account_name()
+ return self._service_account_id
+
+ @property
+ def requires_scopes(self):
+ """Checks if the credentials requires scopes.
+
+ Returns:
+ bool: True if there are no scopes set otherwise False.
+ """
+ return not self._scopes and not self._default_scopes
+
+ @_helpers.copy_docstring(credentials.Scoped)
+ def with_scopes(self, scopes, default_scopes=None):
+ return self.__class__(
+ scopes=scopes,
+ default_scopes=default_scopes,
+ service_account_id=self._service_account_id,
+ quota_project_id=self.quota_project_id,
+ )
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(
+ scopes=self._scopes,
+ service_account_id=self._service_account_id,
+ quota_project_id=quota_project_id,
+ )
+
+ @_helpers.copy_docstring(credentials.Signing)
+ def sign_bytes(self, message):
+ return self._signer.sign(message)
+
+ @property # type: ignore
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer_email(self):
+ return self.service_account_email
+
+ @property # type: ignore
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer(self):
+ return self._signer
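+
+
+# Usage sketch (illustrative; only meaningful inside the App Engine
+# first-generation standard environment, where the App Identity API is
+# importable):
+#
+#   credentials = Credentials(
+#       scopes=["https://www.googleapis.com/auth/cloud-platform"]
+#   )
+#   project = get_project_id()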
diff --git a/Lib/site-packages/google/auth/aws.py b/Lib/site-packages/google/auth/aws.py
new file mode 100644
index 0000000..6e0e4e8
--- /dev/null
+++ b/Lib/site-packages/google/auth/aws.py
@@ -0,0 +1,777 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AWS Credentials and AWS Signature V4 Request Signer.
+
+This module provides credentials to access Google Cloud resources from Amazon
+Web Services (AWS) workloads. These credentials are recommended over the
+use of service account credentials in AWS as they do not involve the management
+of long-lived service account private keys.
+
+AWS Credentials are initialized using external_account arguments which are
+typically loaded from the external credentials JSON file.
+Unlike other Credentials that can be initialized with a list of explicit
+arguments, secrets or credentials, external account clients use the
+environment and hints/guidelines provided by the external_account JSON
+file to retrieve credentials and exchange them for Google access tokens.
+
+This module also provides a basic implementation of the
+`AWS Signature Version 4`_ request signing algorithm.
+
+AWS Credentials use serialized signed requests to the
+`AWS STS GetCallerIdentity`_ API that can be exchanged for Google access tokens
+via the GCP STS endpoint.
+
+.. _AWS Signature Version 4: https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html
+.. _AWS STS GetCallerIdentity: https://docs.aws.amazon.com/STS/latest/APIReference/API_GetCallerIdentity.html
+"""
+
+import hashlib
+import hmac
+import http.client as http_client
+import json
+import os
+import posixpath
+import re
+import urllib
+from urllib.parse import urljoin
+
+from google.auth import _helpers
+from google.auth import environment_vars
+from google.auth import exceptions
+from google.auth import external_account
+
+# AWS Signature Version 4 signing algorithm identifier.
+_AWS_ALGORITHM = "AWS4-HMAC-SHA256"
+# The termination string for the AWS credential scope value as defined in
+# https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html
+_AWS_REQUEST_TYPE = "aws4_request"
+# The AWS authorization header name for the security session token if available.
+_AWS_SECURITY_TOKEN_HEADER = "x-amz-security-token"
+# The AWS authorization header name for the auto-generated date.
+_AWS_DATE_HEADER = "x-amz-date"
+
+
+class RequestSigner(object):
+ """Implements an AWS request signer based on the AWS Signature Version 4 signing
+ process.
+ https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html
+ """
+
+ def __init__(self, region_name):
+ """Instantiates an AWS request signer used to compute authenticated signed
+ requests to AWS APIs based on the AWS Signature Version 4 signing process.
+
+ Args:
+ region_name (str): The AWS region to use.
+ """
+
+ self._region_name = region_name
+
+ def get_request_options(
+ self,
+ aws_security_credentials,
+ url,
+ method,
+ request_payload="",
+        additional_headers=None,
+ ):
+ """Generates the signed request for the provided HTTP request for calling
+ an AWS API. This follows the steps described at:
+ https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html
+
+ Args:
+ aws_security_credentials (Mapping[str, str]): A dictionary containing
+ the AWS security credentials.
+ url (str): The AWS service URL containing the canonical URI and
+ query string.
+ method (str): The HTTP method used to call this API.
+ request_payload (Optional[str]): The optional request payload if
+ available.
+ additional_headers (Optional[Mapping[str, str]]): The optional
+ additional headers needed for the requested AWS API.
+
+ Returns:
+ Mapping[str, str]: The AWS signed request dictionary object.
+ """
+ # Get AWS credentials.
+ access_key = aws_security_credentials.get("access_key_id")
+ secret_key = aws_security_credentials.get("secret_access_key")
+ security_token = aws_security_credentials.get("security_token")
+
+ additional_headers = additional_headers or {}
+
+ uri = urllib.parse.urlparse(url)
+ # Normalize the URL path. This is needed for the canonical_uri.
+ # os.path.normpath can't be used since it normalizes "/" paths
+ # to "\\" in Windows OS.
+ normalized_uri = urllib.parse.urlparse(
+ urljoin(url, posixpath.normpath(uri.path))
+ )
+ # Validate provided URL.
+ if not uri.hostname or uri.scheme != "https":
+ raise exceptions.InvalidResource("Invalid AWS service URL")
+
+ header_map = _generate_authentication_header_map(
+ host=uri.hostname,
+ canonical_uri=normalized_uri.path or "/",
+ canonical_querystring=_get_canonical_querystring(uri.query),
+ method=method,
+ region=self._region_name,
+ access_key=access_key,
+ secret_key=secret_key,
+ security_token=security_token,
+ request_payload=request_payload,
+ additional_headers=additional_headers,
+ )
+ headers = {
+ "Authorization": header_map.get("authorization_header"),
+ "host": uri.hostname,
+ }
+ # Add x-amz-date if available.
+ if "amz_date" in header_map:
+ headers[_AWS_DATE_HEADER] = header_map.get("amz_date")
+        # Append additional optional headers, e.g. X-Amz-Target, Content-Type, etc.
+ for key in additional_headers:
+ headers[key] = additional_headers[key]
+
+ # Add session token if available.
+ if security_token is not None:
+ headers[_AWS_SECURITY_TOKEN_HEADER] = security_token
+
+ signed_request = {"url": url, "method": method, "headers": headers}
+ if request_payload:
+ signed_request["data"] = request_payload
+ return signed_request
+
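+# Usage sketch (illustrative; credential values are placeholders, and the URL
+# is the STS GetCallerIdentity endpoint used by these credentials):
+#
+#   signer = RequestSigner("us-east-1")
+#   options = signer.get_request_options(
+#       {"access_key_id": "AKID...", "secret_access_key": "SECRET..."},
+#       "https://sts.us-east-1.amazonaws.com/?Action=GetCallerIdentity&Version=2011-06-15",
+#       "POST",
+#   )
+#   # options contains "url", "method" and signed "headers".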
+
+def _get_canonical_querystring(query):
+ """Generates the canonical query string given a raw query string.
+ Logic is based on
+ https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
+
+ Args:
+ query (str): The raw query string.
+
+ Returns:
+ str: The canonical query string.
+ """
+ # Parse raw query string.
+ querystring = urllib.parse.parse_qs(query)
+ querystring_encoded_map = {}
+ for key in querystring:
+ quote_key = urllib.parse.quote(key, safe="-_.~")
+ # URI encode key.
+ querystring_encoded_map[quote_key] = []
+ for item in querystring[key]:
+ # For each key, URI encode all values for that key.
+ querystring_encoded_map[quote_key].append(
+ urllib.parse.quote(item, safe="-_.~")
+ )
+ # Sort values for each key.
+ querystring_encoded_map[quote_key].sort()
+ # Sort keys.
+ sorted_keys = list(querystring_encoded_map.keys())
+ sorted_keys.sort()
+ # Reconstruct the query string. Preserve keys with multiple values.
+ querystring_encoded_pairs = []
+ for key in sorted_keys:
+ for item in querystring_encoded_map[key]:
+ querystring_encoded_pairs.append("{}={}".format(key, item))
+ return "&".join(querystring_encoded_pairs)
+
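+# e.g. (a sketch): _get_canonical_querystring("b=2&a=1&a=3")
+# returns "a=1&a=3&b=2" (keys and repeated values sorted and URI-encoded).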
+
+def _sign(key, msg):
+ """Creates the HMAC-SHA256 hash of the provided message using the provided
+ key.
+
+    Args:
+        key (bytes): The HMAC-SHA256 key to use.
+        msg (str): The message to hash.
+
+    Returns:
+        bytes: The computed HMAC digest.
+    """
+ return hmac.new(key, msg.encode("utf-8"), hashlib.sha256).digest()
+
+
+def _get_signing_key(key, date_stamp, region_name, service_name):
+ """Calculates the signing key used to calculate the signature for
+ AWS Signature Version 4 based on:
+ https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html
+
+    Args:
+        key (str): The AWS secret access key.
+        date_stamp (str): The request date in '%Y%m%d' format.
+        region_name (str): The AWS region.
+        service_name (str): The AWS service name, e.g. sts.
+
+    Returns:
+        bytes: The signing key.
+    """
+ k_date = _sign(("AWS4" + key).encode("utf-8"), date_stamp)
+ k_region = _sign(k_date, region_name)
+ k_service = _sign(k_region, service_name)
+ k_signing = _sign(k_service, "aws4_request")
+ return k_signing
+
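+# Derivation sketch (illustrative; all values are placeholders):
+#
+#   signing_key = _get_signing_key("SECRET...", "20230815", "us-east-1", "sts")
+#
+# The key is derived by chaining HMAC-SHA256 over the date, region, service
+# name and the "aws4_request" terminator, per the SigV4 specification.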
+
+def _generate_authentication_header_map(
+ host,
+ canonical_uri,
+ canonical_querystring,
+ method,
+ region,
+ access_key,
+ secret_key,
+ security_token,
+ request_payload="",
+ additional_headers={},
+):
+ """Generates the authentication header map needed for generating the AWS
+ Signature Version 4 signed request.
+
+ Args:
+ host (str): The AWS service URL hostname.
+ canonical_uri (str): The AWS service URL path name.
+ canonical_querystring (str): The AWS service URL query string.
+ method (str): The HTTP method used to call this API.
+ region (str): The AWS region.
+ access_key (str): The AWS access key ID.
+ secret_key (str): The AWS secret access key.
+ security_token (Optional[str]): The AWS security session token. This is
+ available for temporary sessions.
+ request_payload (Optional[str]): The optional request payload if
+ available.
+ additional_headers (Optional[Mapping[str, str]]): The optional
+ additional headers needed for the requested AWS API.
+
+ Returns:
+ Mapping[str, str]: The AWS authentication header dictionary object.
+ This contains the x-amz-date and authorization header information.
+ """
+ # iam.amazonaws.com host => iam service.
+ # sts.us-east-2.amazonaws.com host => sts service.
+ service_name = host.split(".")[0]
+
+ current_time = _helpers.utcnow()
+ amz_date = current_time.strftime("%Y%m%dT%H%M%SZ")
+ date_stamp = current_time.strftime("%Y%m%d")
+
+ # Change all additional headers to be lower case.
+ full_headers = {}
+ for key in additional_headers:
+ full_headers[key.lower()] = additional_headers[key]
+ # Add AWS session token if available.
+ if security_token is not None:
+ full_headers[_AWS_SECURITY_TOKEN_HEADER] = security_token
+
+ # Required headers
+ full_headers["host"] = host
+ # Do not use generated x-amz-date if the date header is provided.
+    # Previously the date was not prefixed with x-amz- and could be provided
+    # manually.
+ # https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req
+ if "date" not in full_headers:
+ full_headers[_AWS_DATE_HEADER] = amz_date
+
+ # Header keys need to be sorted alphabetically.
+ canonical_headers = ""
+ header_keys = list(full_headers.keys())
+ header_keys.sort()
+ for key in header_keys:
+ canonical_headers = "{}{}:{}\n".format(
+ canonical_headers, key, full_headers[key]
+ )
+ signed_headers = ";".join(header_keys)
+
+ payload_hash = hashlib.sha256((request_payload or "").encode("utf-8")).hexdigest()
+
+ # https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
+ canonical_request = "{}\n{}\n{}\n{}\n{}\n{}".format(
+ method,
+ canonical_uri,
+ canonical_querystring,
+ canonical_headers,
+ signed_headers,
+ payload_hash,
+ )
+
+ credential_scope = "{}/{}/{}/{}".format(
+ date_stamp, region, service_name, _AWS_REQUEST_TYPE
+ )
+
+ # https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html
+ string_to_sign = "{}\n{}\n{}\n{}".format(
+ _AWS_ALGORITHM,
+ amz_date,
+ credential_scope,
+ hashlib.sha256(canonical_request.encode("utf-8")).hexdigest(),
+ )
+
+ # https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html
+ signing_key = _get_signing_key(secret_key, date_stamp, region, service_name)
+ signature = hmac.new(
+ signing_key, string_to_sign.encode("utf-8"), hashlib.sha256
+ ).hexdigest()
+
+ # https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html
+ authorization_header = "{} Credential={}/{}, SignedHeaders={}, Signature={}".format(
+ _AWS_ALGORITHM, access_key, credential_scope, signed_headers, signature
+ )
+
+ authentication_header = {"authorization_header": authorization_header}
+ # Do not use generated x-amz-date if the date header is provided.
+ if "date" not in full_headers:
+ authentication_header["amz_date"] = amz_date
+ return authentication_header
+
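A hedged sketch of calling this helper directly; every credential value below is a placeholder, and the host and query mirror the STS GetCallerIdentity call used elsewhere in this module:

```python
from google.auth import aws

header_map = aws._generate_authentication_header_map(
    host="sts.us-east-2.amazonaws.com",  # service name "sts" is derived from this
    canonical_uri="/",
    canonical_querystring="Action=GetCallerIdentity&Version=2011-06-15",
    method="POST",
    region="us-east-2",
    access_key="AKIDEXAMPLE",  # placeholder
    secret_key="<secret>",     # placeholder
    security_token=None,
)
# header_map["amz_date"]             -> e.g. "20240101T000000Z"
# header_map["authorization_header"] -> "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/
#     20240101/us-east-2/sts/aws4_request, SignedHeaders=host;x-amz-date,
#     Signature=<hex>"
```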
+
+class Credentials(external_account.Credentials):
+ """AWS external account credentials.
+ This is used to exchange serialized AWS signature v4 signed requests to
+ AWS STS GetCallerIdentity service for Google access tokens.
+ """
+
+ def __init__(
+ self,
+ audience,
+ subject_token_type,
+ token_url,
+ credential_source=None,
+ *args,
+ **kwargs
+ ):
+ """Instantiates an AWS workload external account credentials object.
+
+ Args:
+ audience (str): The STS audience field.
+ subject_token_type (str): The subject token type.
+ token_url (str): The STS endpoint URL.
+ credential_source (Mapping): The credential source dictionary used
+ to provide instructions on how to retrieve external credential
+ to be exchanged for Google access tokens.
+ args (List): Optional positional arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
+ kwargs (Mapping): Optional keyword arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If an error is encountered during
+ access token retrieval logic.
+ ValueError: For invalid parameters.
+
+ .. note:: Typically one of the helper constructors
+ :meth:`from_file` or
+ :meth:`from_info` are used instead of calling the constructor directly.
+ """
+ super(Credentials, self).__init__(
+ audience=audience,
+ subject_token_type=subject_token_type,
+ token_url=token_url,
+ credential_source=credential_source,
+ *args,
+ **kwargs
+ )
+ credential_source = credential_source or {}
+ self._environment_id = credential_source.get("environment_id") or ""
+ self._region_url = credential_source.get("region_url")
+ self._security_credentials_url = credential_source.get("url")
+ self._cred_verification_url = credential_source.get(
+ "regional_cred_verification_url"
+ )
+ self._imdsv2_session_token_url = credential_source.get(
+ "imdsv2_session_token_url"
+ )
+ self._region = None
+ self._request_signer = None
+ self._target_resource = audience
+
+        # Get the environment ID. Currently, only one version is supported (v1).
+ matches = re.match(r"^(aws)([\d]+)$", self._environment_id)
+ if matches:
+ env_id, env_version = matches.groups()
+ else:
+ env_id, env_version = (None, None)
+
+ if env_id != "aws" or self._cred_verification_url is None:
+ raise exceptions.InvalidResource(
+ "No valid AWS 'credential_source' provided"
+ )
+ elif int(env_version or "") != 1:
+ raise exceptions.InvalidValue(
+ "aws version '{}' is not supported in the current build.".format(
+ env_version
+ )
+ )
+
+ def retrieve_subject_token(self, request):
+ """Retrieves the subject token using the credential_source object.
+ The subject token is a serialized `AWS GetCallerIdentity signed request`_.
+
+ The logic is summarized as:
+
+ Retrieve the AWS region from the AWS_REGION or AWS_DEFAULT_REGION
+ environment variable or from the AWS metadata server availability-zone
+ if not found in the environment variable.
+
+ Check AWS credentials in environment variables. If not found, retrieve
+ from the AWS metadata server security-credentials endpoint.
+
+ When retrieving AWS credentials from the metadata server
+ security-credentials endpoint, the AWS role needs to be determined by
+ calling the security-credentials endpoint without any argument. Then the
+ credentials can be retrieved via: security-credentials/role_name
+
+ Generate the signed request to AWS STS GetCallerIdentity action.
+
+ Inject x-goog-cloud-target-resource into header and serialize the
+ signed request. This will be the subject-token to pass to GCP STS.
+
+ .. _AWS GetCallerIdentity signed request:
+ https://cloud.google.com/iam/docs/access-resources-aws#exchange-token
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ Returns:
+ str: The retrieved subject token.
+ """
+        # Fetch the session token required to make metadata endpoint calls to AWS.
+ if (
+ request is not None
+ and self._imdsv2_session_token_url is not None
+ and self._should_use_metadata_server()
+ ):
+ headers = {"X-aws-ec2-metadata-token-ttl-seconds": "300"}
+
+ imdsv2_session_token_response = request(
+ url=self._imdsv2_session_token_url, method="PUT", headers=headers
+ )
+
+ if imdsv2_session_token_response.status != 200:
+ raise exceptions.RefreshError(
+ "Unable to retrieve AWS Session Token",
+ imdsv2_session_token_response.data,
+ )
+
+ imdsv2_session_token = imdsv2_session_token_response.data
+ else:
+ imdsv2_session_token = None
+
+ # Initialize the request signer if not yet initialized after determining
+ # the current AWS region.
+ if self._request_signer is None:
+ self._region = self._get_region(
+ request, self._region_url, imdsv2_session_token
+ )
+ self._request_signer = RequestSigner(self._region)
+
+ # Retrieve the AWS security credentials needed to generate the signed
+ # request.
+ aws_security_credentials = self._get_security_credentials(
+ request, imdsv2_session_token
+ )
+ # Generate the signed request to AWS STS GetCallerIdentity API.
+ # Use the required regional endpoint. Otherwise, the request will fail.
+ request_options = self._request_signer.get_request_options(
+ aws_security_credentials,
+ self._cred_verification_url.replace("{region}", self._region),
+ "POST",
+ )
+ # The GCP STS endpoint expects the headers to be formatted as:
+ # [
+ # {key: 'x-amz-date', value: '...'},
+ # {key: 'Authorization', value: '...'},
+ # ...
+ # ]
+ # And then serialized as:
+ # quote(json.dumps({
+ # url: '...',
+ # method: 'POST',
+ # headers: [{key: 'x-amz-date', value: '...'}, ...]
+ # }))
+ request_headers = request_options.get("headers")
+ # The full, canonical resource name of the workload identity pool
+ # provider, with or without the HTTPS prefix.
+ # Including this header as part of the signature is recommended to
+ # ensure data integrity.
+ request_headers["x-goog-cloud-target-resource"] = self._target_resource
+
+ # Serialize AWS signed request.
+ # Keeping inner keys in sorted order makes testing easier for Python
+ # versions <=3.5 as the stringified JSON string would have a predictable
+ # key order.
+ aws_signed_req = {}
+ aws_signed_req["url"] = request_options.get("url")
+ aws_signed_req["method"] = request_options.get("method")
+ aws_signed_req["headers"] = []
+ # Reformat header to GCP STS expected format.
+ for key in sorted(request_headers.keys()):
+ aws_signed_req["headers"].append(
+ {"key": key, "value": request_headers[key]}
+ )
+
+ return urllib.parse.quote(
+ json.dumps(aws_signed_req, separators=(",", ":"), sort_keys=True)
+ )
+
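The resulting subject token therefore has the shape sketched below. This is a hypothetical reconstruction with placeholder header values, not output from a real exchange:

```python
import json
import urllib.parse

aws_signed_req = {
    "url": "https://sts.us-east-2.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15",
    "method": "POST",
    "headers": [  # one {key, value} entry per header, sorted by key
        {"key": "authorization", "value": "AWS4-HMAC-SHA256 ..."},
        {"key": "host", "value": "sts.us-east-2.amazonaws.com"},
        {"key": "x-amz-date", "value": "20240101T000000Z"},
        {"key": "x-goog-cloud-target-resource", "value": "//iam.googleapis.com/..."},
    ],
}
subject_token = urllib.parse.quote(
    json.dumps(aws_signed_req, separators=(",", ":"), sort_keys=True)
)
```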
+ def _get_region(self, request, url, imdsv2_session_token):
+ """Retrieves the current AWS region from either the AWS_REGION or
+ AWS_DEFAULT_REGION environment variable or from the AWS metadata server.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ url (str): The AWS metadata server region URL.
+ imdsv2_session_token (str): The AWS IMDSv2 session token to be added as a
+ header in the requests to AWS metadata endpoint.
+
+ Returns:
+ str: The current AWS region.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If an error occurs while
+ retrieving the AWS region.
+ """
+ # The AWS metadata server is not available in some AWS environments
+ # such as AWS lambda. Instead, it is available via environment
+ # variable.
+ env_aws_region = os.environ.get(environment_vars.AWS_REGION)
+ if env_aws_region is not None:
+ return env_aws_region
+
+ env_aws_region = os.environ.get(environment_vars.AWS_DEFAULT_REGION)
+ if env_aws_region is not None:
+ return env_aws_region
+
+ if not self._region_url:
+ raise exceptions.RefreshError("Unable to determine AWS region")
+
+ headers = None
+ if imdsv2_session_token is not None:
+ headers = {"X-aws-ec2-metadata-token": imdsv2_session_token}
+
+ response = request(url=self._region_url, method="GET", headers=headers)
+
+ # Support both string and bytes type response.data.
+ response_body = (
+ response.data.decode("utf-8")
+ if hasattr(response.data, "decode")
+ else response.data
+ )
+
+ if response.status != 200:
+ raise exceptions.RefreshError(
+ "Unable to retrieve AWS region", response_body
+ )
+
+        # This endpoint returns the region in the format: us-east-2b.
+ # Only the us-east-2 part should be used.
+ return response_body[:-1]
+
+ def _get_security_credentials(self, request, imdsv2_session_token):
+ """Retrieves the AWS security credentials required for signing AWS
+ requests from either the AWS security credentials environment variables
+ or from the AWS metadata server.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ imdsv2_session_token (str): The AWS IMDSv2 session token to be added as a
+ header in the requests to AWS metadata endpoint.
+
+ Returns:
+ Mapping[str, str]: The AWS security credentials dictionary object.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If an error occurs while
+ retrieving the AWS security credentials.
+ """
+
+ # Check environment variables for permanent credentials first.
+ # https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html
+ env_aws_access_key_id = os.environ.get(environment_vars.AWS_ACCESS_KEY_ID)
+ env_aws_secret_access_key = os.environ.get(
+ environment_vars.AWS_SECRET_ACCESS_KEY
+ )
+ # This is normally not available for permanent credentials.
+ env_aws_session_token = os.environ.get(environment_vars.AWS_SESSION_TOKEN)
+ if env_aws_access_key_id and env_aws_secret_access_key:
+ return {
+ "access_key_id": env_aws_access_key_id,
+ "secret_access_key": env_aws_secret_access_key,
+ "security_token": env_aws_session_token,
+ }
+
+ # Get role name.
+ role_name = self._get_metadata_role_name(request, imdsv2_session_token)
+
+ # Get security credentials.
+ credentials = self._get_metadata_security_credentials(
+ request, role_name, imdsv2_session_token
+ )
+
+ return {
+ "access_key_id": credentials.get("AccessKeyId"),
+ "secret_access_key": credentials.get("SecretAccessKey"),
+ "security_token": credentials.get("Token"),
+ }
+
+ def _get_metadata_security_credentials(
+ self, request, role_name, imdsv2_session_token
+ ):
+ """Retrieves the AWS security credentials required for signing AWS
+ requests from the AWS metadata server.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ role_name (str): The AWS role name required by the AWS metadata
+ server security_credentials endpoint in order to return the
+ credentials.
+ imdsv2_session_token (str): The AWS IMDSv2 session token to be added as a
+ header in the requests to AWS metadata endpoint.
+
+ Returns:
+ Mapping[str, str]: The AWS metadata server security credentials
+ response.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If an error occurs while
+ retrieving the AWS security credentials.
+ """
+ headers = {"Content-Type": "application/json"}
+ if imdsv2_session_token is not None:
+ headers["X-aws-ec2-metadata-token"] = imdsv2_session_token
+
+ response = request(
+ url="{}/{}".format(self._security_credentials_url, role_name),
+ method="GET",
+ headers=headers,
+ )
+
+ # support both string and bytes type response.data
+ response_body = (
+ response.data.decode("utf-8")
+ if hasattr(response.data, "decode")
+ else response.data
+ )
+
+ if response.status != http_client.OK:
+ raise exceptions.RefreshError(
+ "Unable to retrieve AWS security credentials", response_body
+ )
+
+ credentials_response = json.loads(response_body)
+
+ return credentials_response
+
+ def _get_metadata_role_name(self, request, imdsv2_session_token):
+ """Retrieves the AWS role currently attached to the current AWS
+ workload by querying the AWS metadata server. This is needed for the
+ AWS metadata server security credentials endpoint in order to retrieve
+ the AWS security credentials needed to sign requests to AWS APIs.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ imdsv2_session_token (str): The AWS IMDSv2 session token to be added as a
+ header in the requests to AWS metadata endpoint.
+
+ Returns:
+ str: The AWS role name.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If an error occurs while
+ retrieving the AWS role name.
+ """
+ if self._security_credentials_url is None:
+ raise exceptions.RefreshError(
+ "Unable to determine the AWS metadata server security credentials endpoint"
+ )
+
+ headers = None
+ if imdsv2_session_token is not None:
+ headers = {"X-aws-ec2-metadata-token": imdsv2_session_token}
+
+ response = request(
+ url=self._security_credentials_url, method="GET", headers=headers
+ )
+
+ # support both string and bytes type response.data
+ response_body = (
+ response.data.decode("utf-8")
+ if hasattr(response.data, "decode")
+ else response.data
+ )
+
+ if response.status != http_client.OK:
+ raise exceptions.RefreshError(
+ "Unable to retrieve AWS role name", response_body
+ )
+
+ return response_body
+
+ def _should_use_metadata_server(self):
+ # The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION.
+ # The metadata server should be used if it cannot be retrieved from one of
+ # these environment variables.
+ if not os.environ.get(environment_vars.AWS_REGION) and not os.environ.get(
+ environment_vars.AWS_DEFAULT_REGION
+ ):
+ return True
+
+ # AWS security credentials can be retrieved from the AWS_ACCESS_KEY_ID
+ # and AWS_SECRET_ACCESS_KEY environment variables. The metadata server
+ # should be used if either of these are not available.
+ if not os.environ.get(environment_vars.AWS_ACCESS_KEY_ID) or not os.environ.get(
+ environment_vars.AWS_SECRET_ACCESS_KEY
+ ):
+ return True
+
+ return False
+
+ def _create_default_metrics_options(self):
+ metrics_options = super(Credentials, self)._create_default_metrics_options()
+ metrics_options["source"] = "aws"
+ return metrics_options
+
+ @classmethod
+ def from_info(cls, info, **kwargs):
+ """Creates an AWS Credentials instance from parsed external account info.
+
+ Args:
+ info (Mapping[str, str]): The AWS external account info in Google
+ format.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.aws.Credentials: The constructed credentials.
+
+ Raises:
+ ValueError: For invalid parameters.
+ """
+ return super(Credentials, cls).from_info(info, **kwargs)
+
+ @classmethod
+ def from_file(cls, filename, **kwargs):
+ """Creates an AWS Credentials instance from an external account json file.
+
+ Args:
+ filename (str): The path to the AWS external account json file.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.aws.Credentials: The constructed credentials.
+ """
+ return super(Credentials, cls).from_file(filename, **kwargs)
diff --git a/Lib/site-packages/google/auth/compute_engine/__init__.py b/Lib/site-packages/google/auth/compute_engine/__init__.py
new file mode 100644
index 0000000..7e1206f
--- /dev/null
+++ b/Lib/site-packages/google/auth/compute_engine/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Compute Engine authentication."""
+
+from google.auth.compute_engine._metadata import detect_gce_residency_linux
+from google.auth.compute_engine.credentials import Credentials
+from google.auth.compute_engine.credentials import IDTokenCredentials
+
+
+__all__ = ["Credentials", "IDTokenCredentials", "detect_gce_residency_linux"]
diff --git a/Lib/site-packages/google/auth/compute_engine/_metadata.py b/Lib/site-packages/google/auth/compute_engine/_metadata.py
new file mode 100644
index 0000000..1c884c3
--- /dev/null
+++ b/Lib/site-packages/google/auth/compute_engine/_metadata.py
@@ -0,0 +1,360 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Provides helper methods for talking to the Compute Engine metadata server.
+
+See https://cloud.google.com/compute/docs/metadata for more details.
+"""
+
+import datetime
+import http.client as http_client
+import json
+import logging
+import os
+from urllib.parse import urljoin
+
+from google.auth import _helpers
+from google.auth import environment_vars
+from google.auth import exceptions
+from google.auth import metrics
+
+_LOGGER = logging.getLogger(__name__)
+
+# Environment variable GCE_METADATA_HOST is originally named
+# GCE_METADATA_ROOT. For compatibility reasons, here it checks
+# the new variable first; if not set, the system falls back
+# to the old variable.
+_GCE_METADATA_HOST = os.getenv(environment_vars.GCE_METADATA_HOST, None)
+if not _GCE_METADATA_HOST:
+ _GCE_METADATA_HOST = os.getenv(
+ environment_vars.GCE_METADATA_ROOT, "metadata.google.internal"
+ )
+_METADATA_ROOT = "http://{}/computeMetadata/v1/".format(_GCE_METADATA_HOST)
+
+# This is used to ping the metadata server, it avoids the cost of a DNS
+# lookup.
+_METADATA_IP_ROOT = "http://{}".format(
+ os.getenv(environment_vars.GCE_METADATA_IP, "169.254.169.254")
+)
+_METADATA_FLAVOR_HEADER = "metadata-flavor"
+_METADATA_FLAVOR_VALUE = "Google"
+_METADATA_HEADERS = {_METADATA_FLAVOR_HEADER: _METADATA_FLAVOR_VALUE}
+
+# Timeout in seconds to wait for the GCE metadata server when detecting the
+# GCE environment.
+try:
+ _METADATA_DEFAULT_TIMEOUT = int(os.getenv("GCE_METADATA_TIMEOUT", 3))
+except ValueError: # pragma: NO COVER
+ _METADATA_DEFAULT_TIMEOUT = 3
+
+# Detect GCE Residency
+_GOOGLE = "Google"
+_GCE_PRODUCT_NAME_FILE = "/sys/class/dmi/id/product_name"
+
+
+def is_on_gce(request):
+ """Checks to see if the code runs on Google Compute Engine
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+
+ Returns:
+ bool: True if the code runs on Google Compute Engine, False otherwise.
+ """
+ if ping(request):
+ return True
+
+ if os.name == "nt":
+ # TODO: implement GCE residency detection on Windows
+ return False
+
+ # Detect GCE residency on Linux
+ return detect_gce_residency_linux()
+
+
+def detect_gce_residency_linux():
+ """Detect Google Compute Engine residency by smbios check on Linux
+
+ Returns:
+ bool: True if the GCE product name file is detected, False otherwise.
+ """
+ try:
+ with open(_GCE_PRODUCT_NAME_FILE, "r") as file_obj:
+ content = file_obj.read().strip()
+
+ except Exception:
+ return False
+
+ return content.startswith(_GOOGLE)
+
+
+def ping(request, timeout=_METADATA_DEFAULT_TIMEOUT, retry_count=3):
+ """Checks to see if the metadata server is available.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ timeout (int): How long to wait for the metadata server to respond.
+ retry_count (int): How many times to attempt connecting to metadata
+ server using above timeout.
+
+ Returns:
+ bool: True if the metadata server is reachable, False otherwise.
+ """
+ # NOTE: The explicit ``timeout`` is a workaround. The underlying
+ # issue is that resolving an unknown host on some networks will take
+ # 20-30 seconds; making this timeout short fixes the issue, but
+ # could lead to false negatives in the event that we are on GCE, but
+ # the metadata resolution was particularly slow. The latter case is
+ # "unlikely".
+ retries = 0
+ headers = _METADATA_HEADERS.copy()
+ headers[metrics.API_CLIENT_HEADER] = metrics.mds_ping()
+
+ while retries < retry_count:
+ try:
+ response = request(
+ url=_METADATA_IP_ROOT, method="GET", headers=headers, timeout=timeout
+ )
+
+ metadata_flavor = response.headers.get(_METADATA_FLAVOR_HEADER)
+ return (
+ response.status == http_client.OK
+ and metadata_flavor == _METADATA_FLAVOR_VALUE
+ )
+
+ except exceptions.TransportError as e:
+ _LOGGER.warning(
+ "Compute Engine Metadata server unavailable on "
+ "attempt %s of %s. Reason: %s",
+ retries + 1,
+ retry_count,
+ e,
+ )
+ retries += 1
+
+ return False
+
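A minimal usage sketch, assuming the `requests`-based transport that ships with this library:

```python
import google.auth.transport.requests
from google.auth.compute_engine import _metadata

request = google.auth.transport.requests.Request()
if _metadata.ping(request):
    print("Metadata server reachable: likely running on GCE")
```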
+
+def get(
+ request,
+ path,
+ root=_METADATA_ROOT,
+ params=None,
+ recursive=False,
+ retry_count=5,
+ headers=None,
+ return_none_for_not_found_error=False,
+):
+ """Fetch a resource from the metadata server.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ path (str): The resource to retrieve. For example,
+ ``'instance/service-accounts/default'``.
+ root (str): The full path to the metadata server root.
+ params (Optional[Mapping[str, str]]): A mapping of query parameter
+ keys to values.
+ recursive (bool): Whether to do a recursive query of metadata. See
+ https://cloud.google.com/compute/docs/metadata#aggcontents for more
+ details.
+ retry_count (int): How many times to attempt connecting to metadata
+ server using above timeout.
+ headers (Optional[Mapping[str, str]]): Headers for the request.
+ return_none_for_not_found_error (Optional[bool]): If True, returns None
+ for 404 error instead of throwing an exception.
+
+ Returns:
+ Union[Mapping, str]: If the metadata server returns JSON, a mapping of
+            the decoded JSON is returned. Otherwise, the response content is
+ returned as a string.
+
+ Raises:
+ google.auth.exceptions.TransportError: if an error occurred while
+ retrieving metadata.
+ """
+ base_url = urljoin(root, path)
+ query_params = {} if params is None else params
+
+ headers_to_use = _METADATA_HEADERS.copy()
+ if headers:
+ headers_to_use.update(headers)
+
+ if recursive:
+ query_params["recursive"] = "true"
+
+ url = _helpers.update_query(base_url, query_params)
+
+ retries = 0
+ while retries < retry_count:
+ try:
+ response = request(url=url, method="GET", headers=headers_to_use)
+ break
+
+ except exceptions.TransportError as e:
+ _LOGGER.warning(
+ "Compute Engine Metadata server unavailable on "
+ "attempt %s of %s. Reason: %s",
+ retries + 1,
+ retry_count,
+ e,
+ )
+ retries += 1
+ else:
+ raise exceptions.TransportError(
+ "Failed to retrieve {} from the Google Compute Engine "
+ "metadata service. Compute Engine Metadata server unavailable".format(url)
+ )
+
+ content = _helpers.from_bytes(response.data)
+
+ if response.status == http_client.NOT_FOUND and return_none_for_not_found_error:
+ _LOGGER.info(
+ "Compute Engine Metadata server call to %s returned 404, reason: %s",
+ path,
+ content,
+ )
+ return None
+
+ if response.status == http_client.OK:
+ if (
+ _helpers.parse_content_type(response.headers["content-type"])
+ == "application/json"
+ ):
+ try:
+ return json.loads(content)
+ except ValueError as caught_exc:
+ new_exc = exceptions.TransportError(
+ "Received invalid JSON from the Google Compute Engine "
+ "metadata service: {:.20}".format(content)
+ )
+ raise new_exc from caught_exc
+ else:
+ return content
+
+ raise exceptions.TransportError(
+ "Failed to retrieve {} from the Google Compute Engine "
+ "metadata service. Status: {} Response:\n{}".format(
+ url, response.status, response.data
+ ),
+ response,
+ )
+
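For example, a recursive read of the default service account returns the decoded JSON mapping. A sketch, assuming the `requests` transport and a reachable metadata server:

```python
import google.auth.transport.requests
from google.auth.compute_engine import _metadata

request = google.auth.transport.requests.Request()
# Served with Content-Type: application/json, so a dict comes back.
info = _metadata.get(
    request, "instance/service-accounts/default/", recursive=True
)
print(info["email"], info["scopes"])
```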
+
+def get_project_id(request):
+ """Get the Google Cloud Project ID from the metadata server.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+
+ Returns:
+ str: The project ID
+
+ Raises:
+ google.auth.exceptions.TransportError: if an error occurred while
+ retrieving metadata.
+ """
+ return get(request, "project/project-id")
+
+
+def get_universe_domain(request):
+ """Get the universe domain value from the metadata server.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+
+ Returns:
+        str: The universe domain value. If the universe domain endpoint is
+            not found, return the default value, which is googleapis.com.
+
+ Raises:
+ google.auth.exceptions.TransportError: if an error other than
+ 404 occurs while retrieving metadata.
+ """
+ universe_domain = get(
+ request, "universe/universe_domain", return_none_for_not_found_error=True
+ )
+ if not universe_domain:
+ return "googleapis.com"
+ return universe_domain
+
+
+def get_service_account_info(request, service_account="default"):
+ """Get information about a service account from the metadata server.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ service_account (str): The string 'default' or a service account email
+            address. This determines the service account for which to acquire
+            information.
+
+ Returns:
+ Mapping: The service account's information, for example::
+
+ {
+ 'email': '...',
+ 'scopes': ['scope', ...],
+ 'aliases': ['default', '...']
+ }
+
+ Raises:
+ google.auth.exceptions.TransportError: if an error occurred while
+ retrieving metadata.
+ """
+ path = "instance/service-accounts/{0}/".format(service_account)
+ # See https://cloud.google.com/compute/docs/metadata#aggcontents
+ # for more on the use of 'recursive'.
+ return get(request, path, params={"recursive": "true"})
+
+
+def get_service_account_token(request, service_account="default", scopes=None):
+ """Get the OAuth 2.0 access token for a service account.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ service_account (str): The string 'default' or a service account email
+            address. This determines the service account for which to acquire
+            an access token.
+ scopes (Optional[Union[str, List[str]]]): Optional string or list of
+ strings with auth scopes.
+ Returns:
+ Tuple[str, datetime]: The access token and its expiration.
+
+ Raises:
+ google.auth.exceptions.TransportError: if an error occurred while
+ retrieving metadata.
+ """
+ if scopes:
+ if not isinstance(scopes, str):
+ scopes = ",".join(scopes)
+ params = {"scopes": scopes}
+ else:
+ params = None
+
+ metrics_header = {
+ metrics.API_CLIENT_HEADER: metrics.token_request_access_token_mds()
+ }
+
+ path = "instance/service-accounts/{0}/token".format(service_account)
+ token_json = get(request, path, params=params, headers=metrics_header)
+ token_expiry = _helpers.utcnow() + datetime.timedelta(
+ seconds=token_json["expires_in"]
+ )
+ return token_json["access_token"], token_expiry
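Putting the helpers together, a sketch of fetching a scoped access token on a GCE instance (the scope below is illustrative):

```python
import google.auth.transport.requests
from google.auth.compute_engine import _metadata

request = google.auth.transport.requests.Request()
token, expiry = _metadata.get_service_account_token(
    request, scopes=["https://www.googleapis.com/auth/cloud-platform"]
)
print(expiry)  # naive UTC datetime derived from the "expires_in" field
```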
diff --git a/Lib/site-packages/google/auth/compute_engine/credentials.py b/Lib/site-packages/google/auth/compute_engine/credentials.py
new file mode 100644
index 0000000..7541c1d
--- /dev/null
+++ b/Lib/site-packages/google/auth/compute_engine/credentials.py
@@ -0,0 +1,487 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Compute Engine credentials.
+
+This module provides authentication for an application running on Google
+Compute Engine using the Compute Engine metadata server.
+
+"""
+
+import datetime
+
+from google.auth import _helpers
+from google.auth import credentials
+from google.auth import exceptions
+from google.auth import iam
+from google.auth import jwt
+from google.auth import metrics
+from google.auth.compute_engine import _metadata
+from google.auth.transport import requests as google_auth_requests
+from google.oauth2 import _client
+
+
+class Credentials(
+ credentials.Scoped,
+ credentials.CredentialsWithQuotaProject,
+ credentials.CredentialsWithUniverseDomain,
+):
+ """Compute Engine Credentials.
+
+ These credentials use the Google Compute Engine metadata server to obtain
+ OAuth 2.0 access tokens associated with the instance's service account,
+ and are also used for Cloud Run, Flex and App Engine (except for the Python
+ 2.7 runtime, which is supported only on older versions of this library).
+
+ For more information about Compute Engine authentication, including how
+ to configure scopes, see the `Compute Engine authentication
+ documentation`_.
+
+ .. note:: On Compute Engine the metadata server ignores requested scopes.
+ On Cloud Run, Flex and App Engine the server honours requested scopes.
+
+ .. _Compute Engine authentication documentation:
+ https://cloud.google.com/compute/docs/authentication#using
+ """
+
+ def __init__(
+ self,
+ service_account_email="default",
+ quota_project_id=None,
+ scopes=None,
+ default_scopes=None,
+ universe_domain=None,
+ ):
+ """
+ Args:
+ service_account_email (str): The service account email to use, or
+ 'default'. A Compute Engine instance may have multiple service
+ accounts.
+ quota_project_id (Optional[str]): The project ID used for quota and
+ billing.
+ scopes (Optional[Sequence[str]]): The list of scopes for the credentials.
+ default_scopes (Optional[Sequence[str]]): Default scopes passed by a
+ Google client library. Use 'scopes' for user-defined scopes.
+ universe_domain (Optional[str]): The universe domain. If not
+                provided or None, the credential will attempt to fetch the
+                value from the metadata server. If the metadata server doesn't
+                have a universe domain endpoint, the default googleapis.com
+                will be used.
+ """
+ super(Credentials, self).__init__()
+ self._service_account_email = service_account_email
+ self._quota_project_id = quota_project_id
+ self._scopes = scopes
+ self._default_scopes = default_scopes
+ self._universe_domain_cached = False
+ self._universe_domain_request = google_auth_requests.Request()
+ if universe_domain:
+ self._universe_domain = universe_domain
+ self._universe_domain_cached = True
+
+ def _retrieve_info(self, request):
+ """Retrieve information about the service account.
+
+ Updates the scopes and retrieves the full service account email.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ """
+ info = _metadata.get_service_account_info(
+ request, service_account=self._service_account_email
+ )
+
+ self._service_account_email = info["email"]
+
+ # Don't override scopes requested by the user.
+ if self._scopes is None:
+ self._scopes = info["scopes"]
+
+ def _metric_header_for_usage(self):
+ return metrics.CRED_TYPE_SA_MDS
+
+ def refresh(self, request):
+ """Refresh the access token and scopes.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the Compute Engine metadata
+                service can't be reached or if the instance has no
+                credentials.
+ """
+ scopes = self._scopes if self._scopes is not None else self._default_scopes
+ try:
+ self._retrieve_info(request)
+ self.token, self.expiry = _metadata.get_service_account_token(
+ request, service_account=self._service_account_email, scopes=scopes
+ )
+ except exceptions.TransportError as caught_exc:
+ new_exc = exceptions.RefreshError(caught_exc)
+ raise new_exc from caught_exc
+
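A short usage sketch; it is only meaningful in a GCE-like environment with a reachable metadata server:

```python
import google.auth.transport.requests
from google.auth import compute_engine

request = google.auth.transport.requests.Request()
credentials = compute_engine.Credentials()
credentials.refresh(request)  # fetches the token from the metadata server
print(credentials.service_account_email)  # full email resolved from 'default'
print(credentials.expiry)
```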
+ @property
+ def service_account_email(self):
+ """The service account email.
+
+ .. note:: This is not guaranteed to be set until :meth:`refresh` has been
+ called.
+ """
+ return self._service_account_email
+
+ @property
+ def requires_scopes(self):
+ return not self._scopes
+
+ @property
+ def universe_domain(self):
+ if self._universe_domain_cached:
+ return self._universe_domain
+ self._universe_domain = _metadata.get_universe_domain(
+ self._universe_domain_request
+ )
+ self._universe_domain_cached = True
+ return self._universe_domain
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ creds = self.__class__(
+ service_account_email=self._service_account_email,
+ quota_project_id=quota_project_id,
+ scopes=self._scopes,
+ default_scopes=self._default_scopes,
+ )
+ creds._universe_domain = self._universe_domain
+ creds._universe_domain_cached = self._universe_domain_cached
+ return creds
+
+ @_helpers.copy_docstring(credentials.Scoped)
+ def with_scopes(self, scopes, default_scopes=None):
+ # Compute Engine credentials can not be scoped (the metadata service
+ # ignores the scopes parameter). App Engine, Cloud Run and Flex support
+ # requesting scopes.
+ creds = self.__class__(
+ scopes=scopes,
+ default_scopes=default_scopes,
+ service_account_email=self._service_account_email,
+ quota_project_id=self._quota_project_id,
+ )
+ creds._universe_domain = self._universe_domain
+ creds._universe_domain_cached = self._universe_domain_cached
+ return creds
+
+ @_helpers.copy_docstring(credentials.CredentialsWithUniverseDomain)
+ def with_universe_domain(self, universe_domain):
+ return self.__class__(
+ scopes=self._scopes,
+ default_scopes=self._default_scopes,
+ service_account_email=self._service_account_email,
+ quota_project_id=self._quota_project_id,
+ universe_domain=universe_domain,
+ )
+
+
+_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
+_DEFAULT_TOKEN_URI = "https://www.googleapis.com/oauth2/v4/token"
+
+
+class IDTokenCredentials(
+ credentials.CredentialsWithQuotaProject,
+ credentials.Signing,
+ credentials.CredentialsWithTokenUri,
+):
+ """Open ID Connect ID Token-based service account credentials.
+
+ These credentials relies on the default service account of a GCE instance.
+
+ ID token can be requested from `GCE metadata server identity endpoint`_, IAM
+ token endpoint or other token endpoints you specify. If metadata server
+ identity endpoint is not used, the GCE instance must have been started with
+ a service account that has access to the IAM Cloud API.
+
+ .. _GCE metadata server identity endpoint:
+ https://cloud.google.com/compute/docs/instances/verifying-instance-identity
+ """
+
+ def __init__(
+ self,
+ request,
+ target_audience,
+ token_uri=None,
+ additional_claims=None,
+ service_account_email=None,
+ signer=None,
+ use_metadata_identity_endpoint=False,
+ quota_project_id=None,
+ ):
+ """
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ target_audience (str): The intended audience for these credentials,
+ used when requesting the ID Token. The ID Token's ``aud`` claim
+ will be set to this string.
+ token_uri (str): The OAuth 2.0 Token URI.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT assertion used in the authorization grant.
+ service_account_email (str): Optional explicit service account to
+ use to sign JWT tokens.
+ By default, this is the default GCE service account.
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ In case the signer is specified, the request argument will be
+ ignored.
+ use_metadata_identity_endpoint (bool): Whether to use GCE metadata
+ identity endpoint. For backward compatibility the default value
+ is False. If set to True, ``token_uri``, ``additional_claims``,
+            ``service_account_email``, and ``signer`` arguments should not be
+            set; otherwise ValueError will be raised.
+ quota_project_id (Optional[str]): The project ID used for quota and
+ billing.
+
+ Raises:
+ ValueError:
+ If ``use_metadata_identity_endpoint`` is set to True, and one of
+ ``token_uri``, ``additional_claims``, ``service_account_email``,
+ ``signer`` arguments is set.
+ """
+ super(IDTokenCredentials, self).__init__()
+
+ self._quota_project_id = quota_project_id
+ self._use_metadata_identity_endpoint = use_metadata_identity_endpoint
+ self._target_audience = target_audience
+
+ if use_metadata_identity_endpoint:
+ if token_uri or additional_claims or service_account_email or signer:
+ raise exceptions.MalformedError(
+ "If use_metadata_identity_endpoint is set, token_uri, "
+ "additional_claims, service_account_email, signer arguments"
+ " must not be set"
+ )
+ self._token_uri = None
+ self._additional_claims = None
+ self._signer = None
+
+ if service_account_email is None:
+ sa_info = _metadata.get_service_account_info(request)
+ self._service_account_email = sa_info["email"]
+ else:
+ self._service_account_email = service_account_email
+
+ if not use_metadata_identity_endpoint:
+ if signer is None:
+ signer = iam.Signer(
+ request=request,
+ credentials=Credentials(),
+ service_account_email=self._service_account_email,
+ )
+ self._signer = signer
+ self._token_uri = token_uri or _DEFAULT_TOKEN_URI
+
+ if additional_claims is not None:
+ self._additional_claims = additional_claims
+ else:
+ self._additional_claims = {}
+
+ def with_target_audience(self, target_audience):
+ """Create a copy of these credentials with the specified target
+ audience.
+ Args:
+ target_audience (str): The intended audience for these credentials,
+ used when requesting the ID Token.
+ Returns:
+ google.auth.service_account.IDTokenCredentials: A new credentials
+ instance.
+ """
+ # since the signer is already instantiated,
+ # the request is not needed
+ if self._use_metadata_identity_endpoint:
+ return self.__class__(
+ None,
+ target_audience=target_audience,
+ use_metadata_identity_endpoint=True,
+ quota_project_id=self._quota_project_id,
+ )
+ else:
+ return self.__class__(
+ None,
+ service_account_email=self._service_account_email,
+ token_uri=self._token_uri,
+ target_audience=target_audience,
+ additional_claims=self._additional_claims.copy(),
+ signer=self.signer,
+ use_metadata_identity_endpoint=False,
+ quota_project_id=self._quota_project_id,
+ )
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+
+ # since the signer is already instantiated,
+ # the request is not needed
+ if self._use_metadata_identity_endpoint:
+ return self.__class__(
+ None,
+ target_audience=self._target_audience,
+ use_metadata_identity_endpoint=True,
+ quota_project_id=quota_project_id,
+ )
+ else:
+ return self.__class__(
+ None,
+ service_account_email=self._service_account_email,
+ token_uri=self._token_uri,
+ target_audience=self._target_audience,
+ additional_claims=self._additional_claims.copy(),
+ signer=self.signer,
+ use_metadata_identity_endpoint=False,
+ quota_project_id=quota_project_id,
+ )
+
+ @_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
+ def with_token_uri(self, token_uri):
+
+ # since the signer is already instantiated,
+ # the request is not needed
+ if self._use_metadata_identity_endpoint:
+ raise exceptions.MalformedError(
+ "If use_metadata_identity_endpoint is set, token_uri" " must not be set"
+ )
+ else:
+ return self.__class__(
+ None,
+ service_account_email=self._service_account_email,
+ token_uri=token_uri,
+ target_audience=self._target_audience,
+ additional_claims=self._additional_claims.copy(),
+ signer=self.signer,
+ use_metadata_identity_endpoint=False,
+ quota_project_id=self.quota_project_id,
+ )
+
+ def _make_authorization_grant_assertion(self):
+ """Create the OAuth 2.0 assertion.
+ This assertion is used during the OAuth 2.0 grant to acquire an
+ ID token.
+ Returns:
+ bytes: The authorization grant assertion.
+ """
+ now = _helpers.utcnow()
+ lifetime = datetime.timedelta(seconds=_DEFAULT_TOKEN_LIFETIME_SECS)
+ expiry = now + lifetime
+
+ payload = {
+ "iat": _helpers.datetime_to_secs(now),
+ "exp": _helpers.datetime_to_secs(expiry),
+ # The issuer must be the service account email.
+ "iss": self.service_account_email,
+ # The audience must be the auth token endpoint's URI
+ "aud": self._token_uri,
+ # The target audience specifies which service the ID token is
+ # intended for.
+ "target_audience": self._target_audience,
+ }
+
+ payload.update(self._additional_claims)
+
+ token = jwt.encode(self._signer, payload)
+
+ return token
+
+ def _call_metadata_identity_endpoint(self, request):
+ """Request ID token from metadata identity endpoint.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+
+ Returns:
+ Tuple[str, datetime.datetime]: The ID token and the expiry of the ID token.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the Compute Engine metadata
+ service can't be reached or if the instance has no credentials.
+ ValueError: If extracting expiry from the obtained ID token fails.
+ """
+ try:
+ path = "instance/service-accounts/default/identity"
+ params = {"audience": self._target_audience, "format": "full"}
+ metrics_header = {
+ metrics.API_CLIENT_HEADER: metrics.token_request_id_token_mds()
+ }
+ id_token = _metadata.get(
+ request, path, params=params, headers=metrics_header
+ )
+ except exceptions.TransportError as caught_exc:
+ new_exc = exceptions.RefreshError(caught_exc)
+ raise new_exc from caught_exc
+
+ _, payload, _, _ = jwt._unverified_decode(id_token)
+ return id_token, datetime.datetime.utcfromtimestamp(payload["exp"])
+
+ def refresh(self, request):
+ """Refreshes the ID token.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the credentials could
+ not be refreshed.
+ ValueError: If extracting expiry from the obtained ID token fails.
+ """
+ if self._use_metadata_identity_endpoint:
+ self.token, self.expiry = self._call_metadata_identity_endpoint(request)
+ else:
+ assertion = self._make_authorization_grant_assertion()
+ access_token, expiry, _ = _client.id_token_jwt_grant(
+ request, self._token_uri, assertion
+ )
+ self.token = access_token
+ self.expiry = expiry
+
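A sketch of the metadata-identity-endpoint path; the target audience is an illustrative placeholder:

```python
import google.auth.transport.requests
from google.auth import compute_engine

request = google.auth.transport.requests.Request()
credentials = compute_engine.IDTokenCredentials(
    request,
    target_audience="https://my-service.example.com",  # placeholder audience
    use_metadata_identity_endpoint=True,
)
credentials.refresh(request)
# credentials.token is a JWT whose "aud" claim equals the target audience.
```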
+ @property # type: ignore
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer(self):
+ return self._signer
+
+ def sign_bytes(self, message):
+ """Signs the given message.
+
+ Args:
+ message (bytes): The message to sign.
+
+ Returns:
+ bytes: The message's cryptographic signature.
+
+ Raises:
+ ValueError:
+ Signer is not available if metadata identity endpoint is used.
+ """
+ if self._use_metadata_identity_endpoint:
+ raise exceptions.InvalidOperation(
+ "Signer is not available if metadata identity endpoint is used"
+ )
+ return self._signer.sign(message)
+
+ @property
+ def service_account_email(self):
+ """The service account email."""
+ return self._service_account_email
+
+ @property
+ def signer_email(self):
+ return self._service_account_email
diff --git a/Lib/site-packages/google/auth/credentials.py b/Lib/site-packages/google/auth/credentials.py
new file mode 100644
index 0000000..a4fa182
--- /dev/null
+++ b/Lib/site-packages/google/auth/credentials.py
@@ -0,0 +1,511 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Interfaces for credentials."""
+
+import abc
+from enum import Enum
+import os
+
+from google.auth import _helpers, environment_vars
+from google.auth import exceptions
+from google.auth import metrics
+from google.auth._refresh_worker import RefreshThreadManager
+
+
+class Credentials(metaclass=abc.ABCMeta):
+ """Base class for all credentials.
+
+ All credentials have a :attr:`token` that is used for authentication and
+ may also optionally set an :attr:`expiry` to indicate when the token will
+ no longer be valid.
+
+ Most credentials will be :attr:`invalid` until :meth:`refresh` is called.
+ Credentials can do this automatically before the first HTTP request in
+ :meth:`before_request`.
+
+ Although the token and expiration will change as the credentials are
+    :meth:`refreshed <refresh>` and used, credentials should be considered
+ immutable. Various credentials will accept configuration such as private
+ keys, scopes, and other options. These options are not changeable after
+ construction. Some classes will provide mechanisms to copy the credentials
+ with modifications such as :meth:`ScopedCredentials.with_scopes`.
+ """
+
+ def __init__(self):
+ self.token = None
+ """str: The bearer token that can be used in HTTP headers to make
+ authenticated requests."""
+ self.expiry = None
+ """Optional[datetime]: When the token expires and is no longer valid.
+ If this is None, the token is assumed to never expire."""
+ self._quota_project_id = None
+ """Optional[str]: Project to use for quota and billing purposes."""
+ self._trust_boundary = None
+ """Optional[dict]: Cache of a trust boundary response which has a list
+ of allowed regions and an encoded string representation of credentials
+ trust boundary."""
+ self._universe_domain = "googleapis.com"
+ """Optional[str]: The universe domain value, default is googleapis.com
+ """
+
+ self._use_non_blocking_refresh = False
+ self._refresh_worker = RefreshThreadManager()
+
+ @property
+ def expired(self):
+ """Checks if the credentials are expired.
+
+ Note that credentials can be invalid but not expired because
+ Credentials with :attr:`expiry` set to None is considered to never
+ expire.
+
+ .. deprecated:: v2.24.0
+ Prefer checking :attr:`token_state` instead.
+ """
+ if not self.expiry:
+ return False
+ # Remove some threshold from expiry to err on the side of reporting
+ # expiration early so that we avoid the 401-refresh-retry loop.
+ skewed_expiry = self.expiry - _helpers.REFRESH_THRESHOLD
+ return _helpers.utcnow() >= skewed_expiry
+
+ @property
+ def valid(self):
+ """Checks the validity of the credentials.
+
+ This is True if the credentials have a :attr:`token` and the token
+ is not :attr:`expired`.
+
+ .. deprecated:: v2.24.0
+ Prefer checking :attr:`token_state` instead.
+ """
+ return self.token is not None and not self.expired
+
+ @property
+ def token_state(self):
+ """
+        See :obj:`TokenState`.
+ """
+ if self.token is None:
+ return TokenState.INVALID
+
+ # Credentials that can't expire are always treated as fresh.
+ if self.expiry is None:
+ return TokenState.FRESH
+
+ expired = _helpers.utcnow() >= self.expiry
+ if expired:
+ return TokenState.INVALID
+
+ is_stale = _helpers.utcnow() >= (self.expiry - _helpers.REFRESH_THRESHOLD)
+ if is_stale:
+ return TokenState.STALE
+
+ return TokenState.FRESH
+
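A sketch of the recommended check, reusing a `credentials`/`request` pair like the ones constructed in the earlier examples:

```python
from google.auth.credentials import TokenState

if credentials.token_state in (TokenState.STALE, TokenState.INVALID):
    credentials.refresh(request)
```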
+ @property
+ def quota_project_id(self):
+ """Project to use for quota and billing purposes."""
+ return self._quota_project_id
+
+ @property
+ def universe_domain(self):
+ """The universe domain value."""
+ return self._universe_domain
+
+ @abc.abstractmethod
+ def refresh(self, request):
+ """Refreshes the access token.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the credentials could
+ not be refreshed.
+ """
+ # pylint: disable=missing-raises-doc
+ # (pylint doesn't recognize that this is abstract)
+ raise NotImplementedError("Refresh must be implemented")
+
+ def _metric_header_for_usage(self):
+ """The x-goog-api-client header for token usage metric.
+
+        This header will be added to the API service requests in the
+        before_request method. For example, "cred-type/sa-jwt" means a service
+        account self-signed JWT access token is used in the API service
+        request authorization header. Child credential classes need to
+        override this method to provide the header value, if the token usage
+        metric is needed.
+
+ Returns:
+ str: The x-goog-api-client header value.
+ """
+ return None
+
+ def apply(self, headers, token=None):
+ """Apply the token to the authentication header.
+
+ Args:
+ headers (Mapping): The HTTP request headers.
+ token (Optional[str]): If specified, overrides the current access
+ token.
+ """
+ headers["authorization"] = "Bearer {}".format(
+ _helpers.from_bytes(token or self.token)
+ )
+ """Trust boundary value will be a cached value from global lookup.
+
+ The response of trust boundary will be a list of regions and a hex
+ encoded representation.
+
+ An example of global lookup response:
+ {
+ "locations": [
+ "us-central1", "us-east1", "europe-west1", "asia-east1"
+ ]
+ "encoded_locations": "0xA30"
+ }
+ """
+ if self._trust_boundary is not None:
+ headers["x-allowed-locations"] = self._trust_boundary["encoded_locations"]
+ if self.quota_project_id:
+ headers["x-goog-user-project"] = self.quota_project_id
+
+ def _blocking_refresh(self, request):
+ if not self.valid:
+ self.refresh(request)
+
+ def _non_blocking_refresh(self, request):
+ use_blocking_refresh_fallback = False
+
+ if self.token_state == TokenState.STALE:
+ use_blocking_refresh_fallback = not self._refresh_worker.start_refresh(
+ self, request
+ )
+
+ if self.token_state == TokenState.INVALID or use_blocking_refresh_fallback:
+ self.refresh(request)
+ # If the blocking refresh succeeds then we can clear the error info
+ # on the background refresh worker, and perform refreshes in a
+ # background thread.
+ self._refresh_worker.clear_error()
+
+ def before_request(self, request, method, url, headers):
+ """Performs credential-specific before request logic.
+
+ Refreshes the credentials if necessary, then calls :meth:`apply` to
+ apply the token to the authentication header.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ method (str): The request's HTTP method or the RPC method being
+ invoked.
+ url (str): The request's URI or the RPC service's URI.
+ headers (Mapping): The request's headers.
+ """
+ # pylint: disable=unused-argument
+ # (Subclasses may use these arguments to ascertain information about
+ # the http request.)
+ if self._use_non_blocking_refresh:
+ self._non_blocking_refresh(request)
+ else:
+ self._blocking_refresh(request)
+
+ metrics.add_metric_header(headers, self._metric_header_for_usage())
+ self.apply(headers)
+
+ def with_non_blocking_refresh(self):
+ self._use_non_blocking_refresh = True
+
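A sketch of opting in; `credentials` and `request` are assumed from the earlier examples, and the URL is a placeholder:

```python
# After opting in, before_request() starts a background refresh when the token
# is STALE and keeps serving the current token; it blocks only when the token
# is INVALID or a previous background refresh failed.
credentials.with_non_blocking_refresh()
headers = {}
credentials.before_request(
    request, "GET", "https://example.googleapis.com/", headers  # placeholder URL
)
```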
+
+class CredentialsWithQuotaProject(Credentials):
+ """Abstract base for credentials supporting ``with_quota_project`` factory"""
+
+ def with_quota_project(self, quota_project_id):
+ """Returns a copy of these credentials with a modified quota project.
+
+ Args:
+ quota_project_id (str): The project to use for quota and
+ billing purposes
+
+ Returns:
+ google.auth.credentials.Credentials: A new credentials instance.
+ """
+ raise NotImplementedError("This credential does not support quota project.")
+
+ def with_quota_project_from_environment(self):
+ quota_from_env = os.environ.get(environment_vars.GOOGLE_CLOUD_QUOTA_PROJECT)
+ if quota_from_env:
+ return self.with_quota_project(quota_from_env)
+ return self
+
+
+class CredentialsWithTokenUri(Credentials):
+ """Abstract base for credentials supporting ``with_token_uri`` factory"""
+
+ def with_token_uri(self, token_uri):
+ """Returns a copy of these credentials with a modified token uri.
+
+ Args:
+ token_uri (str): The uri to use for fetching/exchanging tokens
+
+ Returns:
+ google.auth.credentials.Credentials: A new credentials instance.
+ """
+ raise NotImplementedError("This credential does not use token uri.")
+
+
+class CredentialsWithUniverseDomain(Credentials):
+ """Abstract base for credentials supporting ``with_universe_domain`` factory"""
+
+ def with_universe_domain(self, universe_domain):
+ """Returns a copy of these credentials with a modified universe domain.
+
+ Args:
+ universe_domain (str): The universe domain to use
+
+ Returns:
+ google.auth.credentials.Credentials: A new credentials instance.
+ """
+ raise NotImplementedError(
+ "This credential does not support with_universe_domain."
+ )
+
+
+class AnonymousCredentials(Credentials):
+ """Credentials that do not provide any authentication information.
+
+ These are useful in the case of services that support anonymous access or
+ local service emulators that do not use credentials.
+ """
+
+ @property
+ def expired(self):
+ """Returns `False`, anonymous credentials never expire."""
+ return False
+
+ @property
+ def valid(self):
+ """Returns `True`, anonymous credentials are always valid."""
+ return True
+
+ def refresh(self, request):
+ """Raises :class:``InvalidOperation``, anonymous credentials cannot be
+ refreshed."""
+ raise exceptions.InvalidOperation("Anonymous credentials cannot be refreshed.")
+
+ def apply(self, headers, token=None):
+ """Anonymous credentials do nothing to the request.
+
+ The optional ``token`` argument is not supported.
+
+ Raises:
+ google.auth.exceptions.InvalidValue: If a token was specified.
+ """
+ if token is not None:
+ raise exceptions.InvalidValue("Anonymous credentials don't support tokens.")
+
+ def before_request(self, request, method, url, headers):
+ """Anonymous credentials do nothing to the request."""
+
+
+class ReadOnlyScoped(metaclass=abc.ABCMeta):
+ """Interface for credentials whose scopes can be queried.
+
+ OAuth 2.0-based credentials allow limiting access using scopes as described
+ in `RFC6749 Section 3.3`_.
+    If a credential class implements this interface then the credentials
+    use scopes in their implementation.
+
+ Some credentials require scopes in order to obtain a token. You can check
+ if scoping is necessary with :attr:`requires_scopes`::
+
+ if credentials.requires_scopes:
+ # Scoping is required.
+ credentials = credentials.with_scopes(scopes=['one', 'two'])
+
+ Credentials that require scopes must either be constructed with scopes::
+
+ credentials = SomeScopedCredentials(scopes=['one', 'two'])
+
+ Or must copy an existing instance using :meth:`with_scopes`::
+
+ scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])
+
+    Some credentials have scopes but do not allow or require scopes to be set;
+    these credentials can be used as-is.
+
+ .. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
+ """
+
+ def __init__(self):
+ super(ReadOnlyScoped, self).__init__()
+ self._scopes = None
+ self._default_scopes = None
+
+ @property
+ def scopes(self):
+ """Sequence[str]: the credentials' current set of scopes."""
+ return self._scopes
+
+ @property
+ def default_scopes(self):
+ """Sequence[str]: the credentials' current set of default scopes."""
+ return self._default_scopes
+
+ @abc.abstractproperty
+ def requires_scopes(self):
+ """True if these credentials require scopes to obtain an access token.
+ """
+ return False
+
+ def has_scopes(self, scopes):
+ """Checks if the credentials have the given scopes.
+
+        .. warning:: This method is not guaranteed to be accurate if the
+ credentials are :attr:`~Credentials.invalid`.
+
+ Args:
+ scopes (Sequence[str]): The list of scopes to check.
+
+ Returns:
+ bool: True if the credentials have the given scopes.
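+
+        Example (illustrative)::
+
+            scope = "https://www.googleapis.com/auth/devstorage.read_only"
+            if credentials.has_scopes([scope]):
+                ...  # the credentials cover read-only storage access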
+ """
+ credential_scopes = (
+ self._scopes if self._scopes is not None else self._default_scopes
+ )
+ return set(scopes).issubset(set(credential_scopes or []))
+
+
+class Scoped(ReadOnlyScoped):
+ """Interface for credentials whose scopes can be replaced while copying.
+
+ OAuth 2.0-based credentials allow limiting access using scopes as described
+ in `RFC6749 Section 3.3`_.
+    If a credential class implements this interface then the credentials
+    use scopes in their implementation.
+
+ Some credentials require scopes in order to obtain a token. You can check
+ if scoping is necessary with :attr:`requires_scopes`::
+
+ if credentials.requires_scopes:
+ # Scoping is required.
+            credentials = credentials.with_scopes(scopes=['one', 'two'])
+
+ Credentials that require scopes must either be constructed with scopes::
+
+ credentials = SomeScopedCredentials(scopes=['one', 'two'])
+
+ Or must copy an existing instance using :meth:`with_scopes`::
+
+ scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])
+
+    Some credentials have scopes but do not allow or require scopes to be set;
+    these credentials can be used as-is.
+
+ .. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
+ """
+
+ @abc.abstractmethod
+ def with_scopes(self, scopes, default_scopes=None):
+ """Create a copy of these credentials with the specified scopes.
+
+ Args:
+            scopes (Sequence[str]): The list of scopes to attach to the
+                current credentials.
+            default_scopes (Sequence[str]): Default scopes passed by a
+                Google client library. Use 'scopes' for user-defined scopes.
+
+ Raises:
+ NotImplementedError: If the credentials' scopes can not be changed.
+ This can be avoided by checking :attr:`requires_scopes` before
+ calling this method.
+ """
+ raise NotImplementedError("This class does not require scoping.")
+
+
+def with_scopes_if_required(credentials, scopes, default_scopes=None):
+ """Creates a copy of the credentials with scopes if scoping is required.
+
+ This helper function is useful when you do not know (or care to know) the
+ specific type of credentials you are using (such as when you use
+ :func:`google.auth.default`). This function will call
+ :meth:`Scoped.with_scopes` if the credentials are scoped credentials and if
+ the credentials require scoping. Otherwise, it will return the credentials
+ as-is.
+
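+    Example (illustrative; assumes application default credentials are
+    available)::
+
+        import google.auth
+        from google.auth.credentials import with_scopes_if_required
+
+        credentials, _ = google.auth.default()
+        credentials = with_scopes_if_required(
+            credentials, ['https://www.googleapis.com/auth/cloud-platform'])
+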
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials to
+ scope if necessary.
+ scopes (Sequence[str]): The list of scopes to use.
+ default_scopes (Sequence[str]): Default scopes passed by a
+ Google client library. Use 'scopes' for user-defined scopes.
+
+ Returns:
+ google.auth.credentials.Credentials: Either a new set of scoped
+ credentials, or the passed in credentials instance if no scoping
+ was required.
+ """
+ if isinstance(credentials, Scoped) and credentials.requires_scopes:
+ return credentials.with_scopes(scopes, default_scopes=default_scopes)
+ else:
+ return credentials
+
+
+class Signing(metaclass=abc.ABCMeta):
+ """Interface for credentials that can cryptographically sign messages."""
+
+ @abc.abstractmethod
+ def sign_bytes(self, message):
+ """Signs the given message.
+
+ Args:
+ message (bytes): The message to sign.
+
+ Returns:
+ bytes: The message's cryptographic signature.
+ """
+ # pylint: disable=missing-raises-doc,redundant-returns-doc
+ # (pylint doesn't recognize that this is abstract)
+ raise NotImplementedError("Sign bytes must be implemented.")
+
+ @abc.abstractproperty
+ def signer_email(self):
+ """Optional[str]: An email address that identifies the signer."""
+ # pylint: disable=missing-raises-doc
+ # (pylint doesn't recognize that this is abstract)
+ raise NotImplementedError("Signer email must be implemented.")
+
+ @abc.abstractproperty
+ def signer(self):
+ """google.auth.crypt.Signer: The signer used to sign bytes."""
+ # pylint: disable=missing-raises-doc
+ # (pylint doesn't recognize that this is abstract)
+ raise NotImplementedError("Signer must be implemented.")
+
+
+class TokenState(Enum):
+ """
+ Tracks the state of a token.
+ FRESH: The token is valid. It is not expired or close to expired, or the token has no expiry.
+ STALE: The token is close to expired, and should be refreshed. The token can be used normally.
+ INVALID: The token is expired or invalid. The token cannot be used for a normal operation.
+ """
+
+ FRESH = 1
+ STALE = 2
+ INVALID = 3
diff --git a/Lib/site-packages/google/auth/crypt/__init__.py b/Lib/site-packages/google/auth/crypt/__init__.py
new file mode 100644
index 0000000..6d147e7
--- /dev/null
+++ b/Lib/site-packages/google/auth/crypt/__init__.py
@@ -0,0 +1,98 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Cryptography helpers for verifying and signing messages.
+
+The simplest way to verify signatures is using :func:`verify_signature`::
+
+ cert = open('certs.pem').read()
+ valid = crypt.verify_signature(message, signature, cert)
+
+If you're going to verify many messages with the same certificate, you can use
+:class:`RSAVerifier`::
+
+ cert = open('certs.pem').read()
+ verifier = crypt.RSAVerifier.from_string(cert)
+ valid = verifier.verify(message, signature)
+
+To sign messages use :class:`RSASigner` with a private key::
+
+ private_key = open('private_key.pem').read()
+ signer = crypt.RSASigner.from_string(private_key)
+ signature = signer.sign(message)
+
+The code above also works for :class:`ES256Signer` and :class:`ES256Verifier`.
+Note that these two classes are only available if your ``cryptography``
+dependency version is at least 1.4.0.
+"""
+
+from google.auth.crypt import base
+from google.auth.crypt import rsa
+
+try:
+ from google.auth.crypt import es256
+except ImportError: # pragma: NO COVER
+ es256 = None # type: ignore
+
+if es256 is not None: # pragma: NO COVER
+ __all__ = [
+ "ES256Signer",
+ "ES256Verifier",
+ "RSASigner",
+ "RSAVerifier",
+ "Signer",
+ "Verifier",
+ ]
+else: # pragma: NO COVER
+ __all__ = ["RSASigner", "RSAVerifier", "Signer", "Verifier"]
+
+
+# Aliases to maintain the v1.0.0 interface, as the crypt module was split
+# into submodules.
+Signer = base.Signer
+Verifier = base.Verifier
+RSASigner = rsa.RSASigner
+RSAVerifier = rsa.RSAVerifier
+
+if es256 is not None: # pragma: NO COVER
+ ES256Signer = es256.ES256Signer
+ ES256Verifier = es256.ES256Verifier
+
+
+def verify_signature(message, signature, certs, verifier_cls=rsa.RSAVerifier):
+ """Verify an RSA or ECDSA cryptographic signature.
+
+    Checks that the provided ``signature`` was generated from ``message`` using
+    the private key associated with one of the ``certs``.
+
+ Args:
+ message (Union[str, bytes]): The plaintext message.
+ signature (Union[str, bytes]): The cryptographic signature to check.
+ certs (Union[Sequence, str, bytes]): The certificate or certificates
+ to use to check the signature.
+        verifier_cls (Optional[~google.auth.crypt.base.Verifier]): Which verifier
+ class to use for verification. This can be used to select different
+ algorithms, such as RSA or ECDSA. Default value is :class:`RSAVerifier`.
+
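+    Example selecting the ES256 verifier (illustrative; requires a recent
+    ``cryptography`` dependency)::
+
+        from google.auth.crypt import es256
+
+        valid = verify_signature(
+            message, signature, cert, verifier_cls=es256.ES256Verifier)
+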
+ Returns:
+ bool: True if the signature is valid, otherwise False.
+ """
+ if isinstance(certs, (str, bytes)):
+ certs = [certs]
+
+ for cert in certs:
+ verifier = verifier_cls.from_string(cert)
+ if verifier.verify(message, signature):
+ return True
+ return False
diff --git a/Lib/site-packages/google/auth/crypt/_cryptography_rsa.py b/Lib/site-packages/google/auth/crypt/_cryptography_rsa.py
new file mode 100644
index 0000000..1a3e9ff
--- /dev/null
+++ b/Lib/site-packages/google/auth/crypt/_cryptography_rsa.py
@@ -0,0 +1,151 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""RSA verifier and signer that use the ``cryptography`` library.
+
+This is a much faster implementation than the fallback (in
+``google.auth.crypt._python_rsa``), which depends on the pure-Python
+``rsa`` library.
+"""
+
+import cryptography.exceptions
+from cryptography.hazmat import backends
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import padding
+import cryptography.x509
+
+from google.auth import _helpers
+from google.auth.crypt import base
+
+_CERTIFICATE_MARKER = b"-----BEGIN CERTIFICATE-----"
+_BACKEND = backends.default_backend()
+_PADDING = padding.PKCS1v15()
+_SHA256 = hashes.SHA256()
+
+
+class RSAVerifier(base.Verifier):
+ """Verifies RSA cryptographic signatures using public keys.
+
+ Args:
+ public_key (
+ cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey):
+ The public key used to verify signatures.
+ """
+
+ def __init__(self, public_key):
+ self._pubkey = public_key
+
+ @_helpers.copy_docstring(base.Verifier)
+ def verify(self, message, signature):
+ message = _helpers.to_bytes(message)
+ try:
+ self._pubkey.verify(signature, message, _PADDING, _SHA256)
+ return True
+ except (ValueError, cryptography.exceptions.InvalidSignature):
+ return False
+
+ @classmethod
+ def from_string(cls, public_key):
+ """Construct an Verifier instance from a public key or public
+ certificate string.
+
+ Args:
+ public_key (Union[str, bytes]): The public key in PEM format or the
+ x509 public key certificate.
+
+ Returns:
+ Verifier: The constructed verifier.
+
+ Raises:
+ ValueError: If the public key can't be parsed.
+ """
+ public_key_data = _helpers.to_bytes(public_key)
+
+ if _CERTIFICATE_MARKER in public_key_data:
+ cert = cryptography.x509.load_pem_x509_certificate(
+ public_key_data, _BACKEND
+ )
+ pubkey = cert.public_key()
+
+ else:
+ pubkey = serialization.load_pem_public_key(public_key_data, _BACKEND)
+
+ return cls(pubkey)
+
+
+class RSASigner(base.Signer, base.FromServiceAccountMixin):
+ """Signs messages with an RSA private key.
+
+ Args:
+ private_key (
+ cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey):
+ The private key to sign with.
+ key_id (str): Optional key ID used to identify this private key. This
+ can be useful to associate the private key with its associated
+ public key or certificate.
+ """
+
+ def __init__(self, private_key, key_id=None):
+ self._key = private_key
+ self._key_id = key_id
+
+ @property # type: ignore
+ @_helpers.copy_docstring(base.Signer)
+ def key_id(self):
+ return self._key_id
+
+ @_helpers.copy_docstring(base.Signer)
+ def sign(self, message):
+ message = _helpers.to_bytes(message)
+ return self._key.sign(message, _PADDING, _SHA256)
+
+ @classmethod
+ def from_string(cls, key, key_id=None):
+ """Construct a RSASigner from a private key in PEM format.
+
+ Args:
+ key (Union[bytes, str]): Private key in PEM format.
+ key_id (str): An optional key id used to identify the private key.
+
+ Returns:
+ google.auth.crypt._cryptography_rsa.RSASigner: The
+ constructed signer.
+
+ Raises:
+ ValueError: If ``key`` is not ``bytes`` or ``str`` (unicode).
+ UnicodeDecodeError: If ``key`` is ``bytes`` but cannot be decoded
+ into a UTF-8 ``str``.
+            ValueError: If ``cryptography`` cannot deserialize the key data.
+ """
+ key = _helpers.to_bytes(key)
+ private_key = serialization.load_pem_private_key(
+ key, password=None, backend=_BACKEND
+ )
+ return cls(private_key, key_id=key_id)
+
+ def __getstate__(self):
+ """Pickle helper that serializes the _key attribute."""
+ state = self.__dict__.copy()
+ state["_key"] = self._key.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.PKCS8,
+ encryption_algorithm=serialization.NoEncryption(),
+ )
+ return state
+
+ def __setstate__(self, state):
+ """Pickle helper that deserializes the _key attribute."""
+ state["_key"] = serialization.load_pem_private_key(state["_key"], None)
+ self.__dict__.update(state)
diff --git a/Lib/site-packages/google/auth/crypt/_helpers.py b/Lib/site-packages/google/auth/crypt/_helpers.py
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/google/auth/crypt/_python_rsa.py b/Lib/site-packages/google/auth/crypt/_python_rsa.py
new file mode 100644
index 0000000..e553c25
--- /dev/null
+++ b/Lib/site-packages/google/auth/crypt/_python_rsa.py
@@ -0,0 +1,175 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Pure-Python RSA cryptography implementation.
+
+Uses the ``rsa``, ``pyasn1`` and ``pyasn1_modules`` packages
+to parse PEM files storing PKCS#1 or PKCS#8 keys as well as
+certificates. There is no support for p12 files.
+"""
+
+from __future__ import absolute_import
+
+import io
+
+from pyasn1.codec.der import decoder # type: ignore
+from pyasn1_modules import pem # type: ignore
+from pyasn1_modules.rfc2459 import Certificate # type: ignore
+from pyasn1_modules.rfc5208 import PrivateKeyInfo # type: ignore
+import rsa # type: ignore
+
+from google.auth import _helpers
+from google.auth import exceptions
+from google.auth.crypt import base
+
+_POW2 = (128, 64, 32, 16, 8, 4, 2, 1)
+_CERTIFICATE_MARKER = b"-----BEGIN CERTIFICATE-----"
+_PKCS1_MARKER = ("-----BEGIN RSA PRIVATE KEY-----", "-----END RSA PRIVATE KEY-----")
+_PKCS8_MARKER = ("-----BEGIN PRIVATE KEY-----", "-----END PRIVATE KEY-----")
+_PKCS8_SPEC = PrivateKeyInfo()
+
+
+def _bit_list_to_bytes(bit_list):
+ """Converts an iterable of 1s and 0s to bytes.
+
+ Combines the list 8 at a time, treating each group of 8 bits
+ as a single byte.
+
+ Args:
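+    For example, ``_bit_list_to_bytes([0, 1, 0, 0, 0, 0, 0, 1])`` returns
+    ``b"A"``: the eight bits encode the integer 65, the ASCII code for ``A``.
+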
+ bit_list (Sequence): Sequence of 1s and 0s.
+
+ Returns:
+ bytes: The decoded bytes.
+ """
+ num_bits = len(bit_list)
+ byte_vals = bytearray()
+ for start in range(0, num_bits, 8):
+ curr_bits = bit_list[start : start + 8]
+ char_val = sum(val * digit for val, digit in zip(_POW2, curr_bits))
+ byte_vals.append(char_val)
+ return bytes(byte_vals)
+
+
+class RSAVerifier(base.Verifier):
+ """Verifies RSA cryptographic signatures using public keys.
+
+ Args:
+ public_key (rsa.key.PublicKey): The public key used to verify
+ signatures.
+ """
+
+ def __init__(self, public_key):
+ self._pubkey = public_key
+
+ @_helpers.copy_docstring(base.Verifier)
+ def verify(self, message, signature):
+ message = _helpers.to_bytes(message)
+ try:
+ return rsa.pkcs1.verify(message, signature, self._pubkey)
+ except (ValueError, rsa.pkcs1.VerificationError):
+ return False
+
+ @classmethod
+ def from_string(cls, public_key):
+ """Construct an Verifier instance from a public key or public
+ certificate string.
+
+ Args:
+ public_key (Union[str, bytes]): The public key in PEM format or the
+ x509 public key certificate.
+
+ Returns:
+ google.auth.crypt._python_rsa.RSAVerifier: The constructed verifier.
+
+ Raises:
+ ValueError: If the public_key can't be parsed.
+ """
+ public_key = _helpers.to_bytes(public_key)
+ is_x509_cert = _CERTIFICATE_MARKER in public_key
+
+ # If this is a certificate, extract the public key info.
+ if is_x509_cert:
+ der = rsa.pem.load_pem(public_key, "CERTIFICATE")
+ asn1_cert, remaining = decoder.decode(der, asn1Spec=Certificate())
+ if remaining != b"":
+ raise exceptions.InvalidValue("Unused bytes", remaining)
+
+ cert_info = asn1_cert["tbsCertificate"]["subjectPublicKeyInfo"]
+ key_bytes = _bit_list_to_bytes(cert_info["subjectPublicKey"])
+ pubkey = rsa.PublicKey.load_pkcs1(key_bytes, "DER")
+ else:
+ pubkey = rsa.PublicKey.load_pkcs1(public_key, "PEM")
+ return cls(pubkey)
+
+
+class RSASigner(base.Signer, base.FromServiceAccountMixin):
+ """Signs messages with an RSA private key.
+
+ Args:
+ private_key (rsa.key.PrivateKey): The private key to sign with.
+ key_id (str): Optional key ID used to identify this private key. This
+ can be useful to associate the private key with its associated
+ public key or certificate.
+ """
+
+ def __init__(self, private_key, key_id=None):
+ self._key = private_key
+ self._key_id = key_id
+
+ @property # type: ignore
+ @_helpers.copy_docstring(base.Signer)
+ def key_id(self):
+ return self._key_id
+
+ @_helpers.copy_docstring(base.Signer)
+ def sign(self, message):
+ message = _helpers.to_bytes(message)
+ return rsa.pkcs1.sign(message, self._key, "SHA-256")
+
+ @classmethod
+ def from_string(cls, key, key_id=None):
+ """Construct an Signer instance from a private key in PEM format.
+
+ Args:
+ key (str): Private key in PEM format.
+ key_id (str): An optional key id used to identify the private key.
+
+ Returns:
+ google.auth.crypt.Signer: The constructed signer.
+
+ Raises:
+ ValueError: If the key cannot be parsed as PKCS#1 or PKCS#8 in
+ PEM format.
+ """
+ key = _helpers.from_bytes(key) # PEM expects str in Python 3
+ marker_id, key_bytes = pem.readPemBlocksFromFile(
+ io.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER
+ )
+
+        # Key is in PKCS#1 format.
+ if marker_id == 0:
+ private_key = rsa.key.PrivateKey.load_pkcs1(key_bytes, format="DER")
+        # Key is in PKCS#8 format.
+ elif marker_id == 1:
+ key_info, remaining = decoder.decode(key_bytes, asn1Spec=_PKCS8_SPEC)
+ if remaining != b"":
+ raise exceptions.InvalidValue("Unused bytes", remaining)
+ private_key_info = key_info.getComponentByName("privateKey")
+ private_key = rsa.key.PrivateKey.load_pkcs1(
+ private_key_info.asOctets(), format="DER"
+ )
+ else:
+ raise exceptions.MalformedError("No key could be detected.")
+
+ return cls(private_key, key_id=key_id)
diff --git a/Lib/site-packages/google/auth/crypt/base.py b/Lib/site-packages/google/auth/crypt/base.py
new file mode 100644
index 0000000..ad871c3
--- /dev/null
+++ b/Lib/site-packages/google/auth/crypt/base.py
@@ -0,0 +1,127 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Base classes for cryptographic signers and verifiers."""
+
+import abc
+import io
+import json
+
+from google.auth import exceptions
+
+_JSON_FILE_PRIVATE_KEY = "private_key"
+_JSON_FILE_PRIVATE_KEY_ID = "private_key_id"
+
+
+class Verifier(metaclass=abc.ABCMeta):
+ """Abstract base class for crytographic signature verifiers."""
+
+ @abc.abstractmethod
+ def verify(self, message, signature):
+ """Verifies a message against a cryptographic signature.
+
+ Args:
+ message (Union[str, bytes]): The message to verify.
+ signature (Union[str, bytes]): The cryptography signature to check.
+
+ Returns:
+ bool: True if message was signed by the private key associated
+ with the public key that this object was constructed with.
+ """
+ # pylint: disable=missing-raises-doc,redundant-returns-doc
+ # (pylint doesn't recognize that this is abstract)
+ raise NotImplementedError("Verify must be implemented")
+
+
+class Signer(metaclass=abc.ABCMeta):
+ """Abstract base class for cryptographic signers."""
+
+ @abc.abstractproperty
+ def key_id(self):
+ """Optional[str]: The key ID used to identify this private key."""
+ raise NotImplementedError("Key id must be implemented")
+
+ @abc.abstractmethod
+ def sign(self, message):
+ """Signs a message.
+
+ Args:
+ message (Union[str, bytes]): The message to be signed.
+
+ Returns:
+ bytes: The signature of the message.
+ """
+ # pylint: disable=missing-raises-doc,redundant-returns-doc
+ # (pylint doesn't recognize that this is abstract)
+ raise NotImplementedError("Sign must be implemented")
+
+
+class FromServiceAccountMixin(metaclass=abc.ABCMeta):
+ """Mix-in to enable factory constructors for a Signer."""
+
+ @abc.abstractmethod
+ def from_string(cls, key, key_id=None):
+ """Construct an Signer instance from a private key string.
+
+ Args:
+ key (str): Private key as a string.
+ key_id (str): An optional key id used to identify the private key.
+
+ Returns:
+ google.auth.crypt.Signer: The constructed signer.
+
+ Raises:
+ ValueError: If the key cannot be parsed.
+ """
+ raise NotImplementedError("from_string must be implemented")
+
+ @classmethod
+ def from_service_account_info(cls, info):
+ """Creates a Signer instance instance from a dictionary containing
+ service account info in Google format.
+
+ Args:
+ info (Mapping[str, str]): The service account info in Google
+ format.
+
+ Returns:
+ google.auth.crypt.Signer: The constructed signer.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ if _JSON_FILE_PRIVATE_KEY not in info:
+ raise exceptions.MalformedError(
+ "The private_key field was not found in the service account " "info."
+ )
+
+ return cls.from_string(
+ info[_JSON_FILE_PRIVATE_KEY], info.get(_JSON_FILE_PRIVATE_KEY_ID)
+ )
+
+ @classmethod
+ def from_service_account_file(cls, filename):
+ """Creates a Signer instance from a service account .json file
+ in Google format.
+
+ Args:
+ filename (str): The path to the service account .json file.
+
+ Returns:
+ google.auth.crypt.Signer: The constructed signer.
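+
+        Example (illustrative; the filename is a placeholder)::
+
+            from google.auth.crypt import RSASigner
+
+            signer = RSASigner.from_service_account_file("service_account.json")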
+ """
+ with io.open(filename, "r", encoding="utf-8") as json_file:
+ data = json.load(json_file)
+
+ return cls.from_service_account_info(data)
diff --git a/Lib/site-packages/google/auth/crypt/es256.py b/Lib/site-packages/google/auth/crypt/es256.py
new file mode 100644
index 0000000..820e4be
--- /dev/null
+++ b/Lib/site-packages/google/auth/crypt/es256.py
@@ -0,0 +1,175 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""ECDSA (ES256) verifier and signer that use the ``cryptography`` library.
+"""
+
+from cryptography import utils # type: ignore
+import cryptography.exceptions
+from cryptography.hazmat import backends
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import ec
+from cryptography.hazmat.primitives.asymmetric import padding
+from cryptography.hazmat.primitives.asymmetric.utils import decode_dss_signature
+from cryptography.hazmat.primitives.asymmetric.utils import encode_dss_signature
+import cryptography.x509
+
+from google.auth import _helpers
+from google.auth.crypt import base
+
+
+_CERTIFICATE_MARKER = b"-----BEGIN CERTIFICATE-----"
+_BACKEND = backends.default_backend()
+_PADDING = padding.PKCS1v15()
+
+
+class ES256Verifier(base.Verifier):
+ """Verifies ECDSA cryptographic signatures using public keys.
+
+ Args:
+ public_key (
+ cryptography.hazmat.primitives.asymmetric.ec.ECDSAPublicKey):
+ The public key used to verify signatures.
+ """
+
+ def __init__(self, public_key):
+ self._pubkey = public_key
+
+ @_helpers.copy_docstring(base.Verifier)
+ def verify(self, message, signature):
+ # First convert (r||s) raw signature to ASN1 encoded signature.
+ sig_bytes = _helpers.to_bytes(signature)
+ if len(sig_bytes) != 64:
+ return False
+ r = (
+ int.from_bytes(sig_bytes[:32], byteorder="big")
+ if _helpers.is_python_3()
+ else utils.int_from_bytes(sig_bytes[:32], byteorder="big")
+ )
+ s = (
+ int.from_bytes(sig_bytes[32:], byteorder="big")
+ if _helpers.is_python_3()
+ else utils.int_from_bytes(sig_bytes[32:], byteorder="big")
+ )
+ asn1_sig = encode_dss_signature(r, s)
+
+ message = _helpers.to_bytes(message)
+ try:
+ self._pubkey.verify(asn1_sig, message, ec.ECDSA(hashes.SHA256()))
+ return True
+ except (ValueError, cryptography.exceptions.InvalidSignature):
+ return False
+
+ @classmethod
+ def from_string(cls, public_key):
+ """Construct an Verifier instance from a public key or public
+ certificate string.
+
+ Args:
+ public_key (Union[str, bytes]): The public key in PEM format or the
+ x509 public key certificate.
+
+ Returns:
+ Verifier: The constructed verifier.
+
+ Raises:
+ ValueError: If the public key can't be parsed.
+ """
+ public_key_data = _helpers.to_bytes(public_key)
+
+ if _CERTIFICATE_MARKER in public_key_data:
+ cert = cryptography.x509.load_pem_x509_certificate(
+ public_key_data, _BACKEND
+ )
+ pubkey = cert.public_key()
+
+ else:
+ pubkey = serialization.load_pem_public_key(public_key_data, _BACKEND)
+
+ return cls(pubkey)
+
+
+class ES256Signer(base.Signer, base.FromServiceAccountMixin):
+ """Signs messages with an ECDSA private key.
+
+ Args:
+ private_key (
+ cryptography.hazmat.primitives.asymmetric.ec.ECDSAPrivateKey):
+ The private key to sign with.
+ key_id (str): Optional key ID used to identify this private key. This
+ can be useful to associate the private key with its associated
+ public key or certificate.
+ """
+
+ def __init__(self, private_key, key_id=None):
+ self._key = private_key
+ self._key_id = key_id
+
+ @property # type: ignore
+ @_helpers.copy_docstring(base.Signer)
+ def key_id(self):
+ return self._key_id
+
+ @_helpers.copy_docstring(base.Signer)
+ def sign(self, message):
+ message = _helpers.to_bytes(message)
+ asn1_signature = self._key.sign(message, ec.ECDSA(hashes.SHA256()))
+
+ # Convert ASN1 encoded signature to (r||s) raw signature.
+ (r, s) = decode_dss_signature(asn1_signature)
+ return (
+ (r.to_bytes(32, byteorder="big") + s.to_bytes(32, byteorder="big"))
+ if _helpers.is_python_3()
+ else (utils.int_to_bytes(r, 32) + utils.int_to_bytes(s, 32))
+ )
+
+ @classmethod
+ def from_string(cls, key, key_id=None):
+ """Construct a RSASigner from a private key in PEM format.
+
+ Args:
+ key (Union[bytes, str]): Private key in PEM format.
+ key_id (str): An optional key id used to identify the private key.
+
+ Returns:
+            google.auth.crypt.es256.ES256Signer: The constructed signer.
+
+ Raises:
+ ValueError: If ``key`` is not ``bytes`` or ``str`` (unicode).
+ UnicodeDecodeError: If ``key`` is ``bytes`` but cannot be decoded
+ into a UTF-8 ``str``.
+            ValueError: If ``cryptography`` cannot deserialize the key data.
+ """
+ key = _helpers.to_bytes(key)
+ private_key = serialization.load_pem_private_key(
+ key, password=None, backend=_BACKEND
+ )
+ return cls(private_key, key_id=key_id)
+
+ def __getstate__(self):
+ """Pickle helper that serializes the _key attribute."""
+ state = self.__dict__.copy()
+ state["_key"] = self._key.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.PKCS8,
+ encryption_algorithm=serialization.NoEncryption(),
+ )
+ return state
+
+ def __setstate__(self, state):
+ """Pickle helper that deserializes the _key attribute."""
+ state["_key"] = serialization.load_pem_private_key(state["_key"], None)
+ self.__dict__.update(state)
diff --git a/Lib/site-packages/google/auth/crypt/rsa.py b/Lib/site-packages/google/auth/crypt/rsa.py
new file mode 100644
index 0000000..ed842d1
--- /dev/null
+++ b/Lib/site-packages/google/auth/crypt/rsa.py
@@ -0,0 +1,30 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""RSA cryptography signer and verifier."""
+
+
+try:
+    # Prefer cryptography-based RSA implementation.
+ from google.auth.crypt import _cryptography_rsa
+
+ RSASigner = _cryptography_rsa.RSASigner
+ RSAVerifier = _cryptography_rsa.RSAVerifier
+except ImportError: # pragma: NO COVER
+ # Fallback to pure-python RSA implementation if cryptography is
+ # unavailable.
+ from google.auth.crypt import _python_rsa
+
+ RSASigner = _python_rsa.RSASigner # type: ignore
+ RSAVerifier = _python_rsa.RSAVerifier # type: ignore
diff --git a/Lib/site-packages/google/auth/downscoped.py b/Lib/site-packages/google/auth/downscoped.py
new file mode 100644
index 0000000..b4d9d38
--- /dev/null
+++ b/Lib/site-packages/google/auth/downscoped.py
@@ -0,0 +1,504 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Downscoping with Credential Access Boundaries
+
+This module provides the ability to downscope credentials using
+`Downscoping with Credential Access Boundaries`_. This is useful to restrict the
+Identity and Access Management (IAM) permissions that a short-lived credential
+can use.
+
+To downscope permissions of a source credential, a Credential Access Boundary
+that specifies which resources the new credential can access, as well as
+an upper bound on the permissions that are available on each resource, has to
+be defined. A downscoped credential can then be instantiated using the source
+credential and the Credential Access Boundary.
+
+The common pattern of usage is to have a token broker with elevated access
+generate these downscoped credentials from higher access source credentials and
+pass the downscoped short-lived access tokens to a token consumer via some
+secure authenticated channel for limited access to Google Cloud Storage
+resources.
+
+For example, a token broker can be set up on a server in a private network.
+Various workloads (token consumers) in the same network will send authenticated
+requests to that broker for downscoped tokens to access or modify specific
+Google Cloud Storage buckets.
+
+The broker will instantiate downscoped credentials instances that can be used to
+generate short lived downscoped access tokens that can be passed to the token
+consumer. These downscoped access tokens can be injected by the consumer into
+google.oauth2.Credentials and used to initialize a storage client instance to
+access Google Cloud Storage resources with restricted access.
+
+Note: Only Cloud Storage supports Credential Access Boundaries. Other Google
+Cloud services do not support this feature.
+
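+Example of a broker creating a downscoped credential (illustrative; the bucket
+name and role are placeholders)::
+
+    import google.auth
+    from google.auth import downscoped
+    from google.auth.transport import requests
+
+    rule = downscoped.AccessBoundaryRule(
+        available_resource=(
+            '//storage.googleapis.com/projects/_/buckets/bucket-name'),
+        available_permissions=['inRole:roles/storage.objectViewer'])
+    credential_access_boundary = downscoped.CredentialAccessBoundary(
+        rules=[rule])
+
+    source_credentials, _ = google.auth.default()
+    downscoped_credentials = downscoped.Credentials(
+        source_credentials=source_credentials,
+        credential_access_boundary=credential_access_boundary)
+    downscoped_credentials.refresh(requests.Request())
+    # ``downscoped_credentials.token`` can now be handed to a consumer, which
+    # may wrap it in ``google.oauth2.credentials.Credentials(token=...)``.
+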
+.. _Downscoping with Credential Access Boundaries: https://cloud.google.com/iam/docs/downscoping-short-lived-credentials
+"""
+
+import datetime
+
+from google.auth import _helpers
+from google.auth import credentials
+from google.auth import exceptions
+from google.oauth2 import sts
+
+# The maximum number of access boundary rules a Credential Access Boundary can
+# contain.
+_MAX_ACCESS_BOUNDARY_RULES_COUNT = 10
+# The token exchange grant_type used for exchanging credentials.
+_STS_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:token-exchange"
+# The token exchange requested_token_type. This is always an access_token.
+_STS_REQUESTED_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:access_token"
+# The STS token URL used to exchange a short lived access token for a downscoped one.
+_STS_TOKEN_URL = "https://sts.googleapis.com/v1/token"
+# The subject token type to use when exchanging a short lived access token for a
+# downscoped token.
+_STS_SUBJECT_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:access_token"
+
+
+class CredentialAccessBoundary(object):
+ """Defines a Credential Access Boundary which contains a list of access boundary
+ rules. Each rule contains information on the resource that the rule applies to,
+ the upper bound of the permissions that are available on that resource and an
+ optional condition to further restrict permissions.
+ """
+
+    def __init__(self, rules=None):
+ """Instantiates a Credential Access Boundary. A Credential Access Boundary
+ can contain up to 10 access boundary rules.
+
+ Args:
+ rules (Sequence[google.auth.downscoped.AccessBoundaryRule]): The list of
+ access boundary rules limiting the access that a downscoped credential
+ will have.
+ Raises:
+ InvalidType: If any of the rules are not a valid type.
+ InvalidValue: If the provided rules exceed the maximum allowed.
+ """
+        self.rules = rules if rules is not None else []
+
+ @property
+ def rules(self):
+ """Returns the list of access boundary rules defined on the Credential
+ Access Boundary.
+
+ Returns:
+ Tuple[google.auth.downscoped.AccessBoundaryRule, ...]: The list of access
+ boundary rules defined on the Credential Access Boundary. These are returned
+ as an immutable tuple to prevent modification.
+ """
+ return tuple(self._rules)
+
+ @rules.setter
+ def rules(self, value):
+ """Updates the current rules on the Credential Access Boundary. This will overwrite
+ the existing set of rules.
+
+ Args:
+ value (Sequence[google.auth.downscoped.AccessBoundaryRule]): The list of
+ access boundary rules limiting the access that a downscoped credential
+ will have.
+ Raises:
+ InvalidType: If any of the rules are not a valid type.
+ InvalidValue: If the provided rules exceed the maximum allowed.
+ """
+ if len(value) > _MAX_ACCESS_BOUNDARY_RULES_COUNT:
+ raise exceptions.InvalidValue(
+ "Credential access boundary rules can have a maximum of {} rules.".format(
+ _MAX_ACCESS_BOUNDARY_RULES_COUNT
+ )
+ )
+ for access_boundary_rule in value:
+ if not isinstance(access_boundary_rule, AccessBoundaryRule):
+ raise exceptions.InvalidType(
+ "List of rules provided do not contain a valid 'google.auth.downscoped.AccessBoundaryRule'."
+ )
+ # Make a copy of the original list.
+ self._rules = list(value)
+
+ def add_rule(self, rule):
+ """Adds a single access boundary rule to the existing rules.
+
+ Args:
+ rule (google.auth.downscoped.AccessBoundaryRule): The access boundary rule,
+ limiting the access that a downscoped credential will have, to be added to
+ the existing rules.
+ Raises:
+ InvalidType: If any of the rules are not a valid type.
+ InvalidValue: If the provided rules exceed the maximum allowed.
+ """
+ if len(self.rules) == _MAX_ACCESS_BOUNDARY_RULES_COUNT:
+ raise exceptions.InvalidValue(
+ "Credential access boundary rules can have a maximum of {} rules.".format(
+ _MAX_ACCESS_BOUNDARY_RULES_COUNT
+ )
+ )
+ if not isinstance(rule, AccessBoundaryRule):
+ raise exceptions.InvalidType(
+ "The provided rule does not contain a valid 'google.auth.downscoped.AccessBoundaryRule'."
+ )
+ self._rules.append(rule)
+
+ def to_json(self):
+ """Generates the dictionary representation of the Credential Access Boundary.
+ This uses the format expected by the Security Token Service API as documented in
+ `Defining a Credential Access Boundary`_.
+
+ .. _Defining a Credential Access Boundary:
+ https://cloud.google.com/iam/docs/downscoping-short-lived-credentials#define-boundary
+
+ Returns:
+ Mapping: Credential Access Boundary Rule represented in a dictionary object.
+ """
+ rules = []
+ for access_boundary_rule in self.rules:
+ rules.append(access_boundary_rule.to_json())
+
+ return {"accessBoundary": {"accessBoundaryRules": rules}}
+
+
+class AccessBoundaryRule(object):
+ """Defines an access boundary rule which contains information on the resource that
+ the rule applies to, the upper bound of the permissions that are available on that
+ resource and an optional condition to further restrict permissions.
+ """
+
+ def __init__(
+ self, available_resource, available_permissions, availability_condition=None
+ ):
+ """Instantiates a single access boundary rule.
+
+ Args:
+ available_resource (str): The full resource name of the Cloud Storage bucket
+ that the rule applies to. Use the format
+ "//storage.googleapis.com/projects/_/buckets/bucket-name".
+ available_permissions (Sequence[str]): A list defining the upper bound that
+ the downscoped token will have on the available permissions for the
+ resource. Each value is the identifier for an IAM predefined role or
+ custom role, with the prefix "inRole:". For example:
+ "inRole:roles/storage.objectViewer".
+ Only the permissions in these roles will be available.
+ availability_condition (Optional[google.auth.downscoped.AvailabilityCondition]):
+ Optional condition that restricts the availability of permissions to
+ specific Cloud Storage objects.
+
+ Raises:
+ InvalidType: If any of the parameters are not of the expected types.
+ InvalidValue: If any of the parameters are not of the expected values.
+ """
+ self.available_resource = available_resource
+ self.available_permissions = available_permissions
+ self.availability_condition = availability_condition
+
+ @property
+ def available_resource(self):
+ """Returns the current available resource.
+
+ Returns:
+ str: The current available resource.
+ """
+ return self._available_resource
+
+ @available_resource.setter
+ def available_resource(self, value):
+ """Updates the current available resource.
+
+ Args:
+ value (str): The updated value of the available resource.
+
+ Raises:
+ google.auth.exceptions.InvalidType: If the value is not a string.
+ """
+ if not isinstance(value, str):
+ raise exceptions.InvalidType(
+ "The provided available_resource is not a string."
+ )
+ self._available_resource = value
+
+ @property
+ def available_permissions(self):
+ """Returns the current available permissions.
+
+ Returns:
+ Tuple[str, ...]: The current available permissions. These are returned
+ as an immutable tuple to prevent modification.
+ """
+ return tuple(self._available_permissions)
+
+ @available_permissions.setter
+ def available_permissions(self, value):
+ """Updates the current available permissions.
+
+ Args:
+ value (Sequence[str]): The updated value of the available permissions.
+
+ Raises:
+ InvalidType: If the value is not a list of strings.
+ InvalidValue: If the value is not valid.
+ """
+ for available_permission in value:
+ if not isinstance(available_permission, str):
+ raise exceptions.InvalidType(
+ "Provided available_permissions are not a list of strings."
+ )
+            if not available_permission.startswith("inRole:"):
+ raise exceptions.InvalidValue(
+ "available_permissions must be prefixed with 'inRole:'."
+ )
+ # Make a copy of the original list.
+ self._available_permissions = list(value)
+
+ @property
+ def availability_condition(self):
+ """Returns the current availability condition.
+
+ Returns:
+ Optional[google.auth.downscoped.AvailabilityCondition]: The current
+ availability condition.
+ """
+ return self._availability_condition
+
+ @availability_condition.setter
+ def availability_condition(self, value):
+ """Updates the current availability condition.
+
+ Args:
+ value (Optional[google.auth.downscoped.AvailabilityCondition]): The updated
+ value of the availability condition.
+
+ Raises:
+ google.auth.exceptions.InvalidType: If the value is not of type google.auth.downscoped.AvailabilityCondition
+ or None.
+ """
+ if not isinstance(value, AvailabilityCondition) and value is not None:
+ raise exceptions.InvalidType(
+ "The provided availability_condition is not a 'google.auth.downscoped.AvailabilityCondition' or None."
+ )
+ self._availability_condition = value
+
+ def to_json(self):
+ """Generates the dictionary representation of the access boundary rule.
+ This uses the format expected by the Security Token Service API as documented in
+ `Defining a Credential Access Boundary`_.
+
+ .. _Defining a Credential Access Boundary:
+ https://cloud.google.com/iam/docs/downscoping-short-lived-credentials#define-boundary
+
+ Returns:
+ Mapping: The access boundary rule represented in a dictionary object.
+ """
+ json = {
+ "availablePermissions": list(self.available_permissions),
+ "availableResource": self.available_resource,
+ }
+ if self.availability_condition:
+ json["availabilityCondition"] = self.availability_condition.to_json()
+ return json
+
+
+class AvailabilityCondition(object):
+ """An optional condition that can be used as part of a Credential Access Boundary
+ to further restrict permissions."""
+
+ def __init__(self, expression, title=None, description=None):
+ """Instantiates an availability condition using the provided expression and
+ optional title or description.
+
+ Args:
+ expression (str): A condition expression that specifies the Cloud Storage
+ objects where permissions are available. For example, this expression
+ makes permissions available for objects whose name starts with "customer-a":
+ "resource.name.startsWith('projects/_/buckets/example-bucket/objects/customer-a')"
+ title (Optional[str]): An optional short string that identifies the purpose of
+ the condition.
+ description (Optional[str]): Optional details about the purpose of the condition.
+
+ Raises:
+ InvalidType: If any of the parameters are not of the expected types.
+ InvalidValue: If any of the parameters are not of the expected values.
+ """
+ self.expression = expression
+ self.title = title
+ self.description = description
+
+ @property
+ def expression(self):
+ """Returns the current condition expression.
+
+ Returns:
+            str: The current condition expression.
+ """
+ return self._expression
+
+ @expression.setter
+ def expression(self, value):
+ """Updates the current condition expression.
+
+ Args:
+ value (str): The updated value of the condition expression.
+
+ Raises:
+ google.auth.exceptions.InvalidType: If the value is not of type string.
+ """
+ if not isinstance(value, str):
+ raise exceptions.InvalidType("The provided expression is not a string.")
+ self._expression = value
+
+ @property
+ def title(self):
+ """Returns the current title.
+
+ Returns:
+ Optional[str]: The current title.
+ """
+ return self._title
+
+ @title.setter
+ def title(self, value):
+ """Updates the current title.
+
+ Args:
+ value (Optional[str]): The updated value of the title.
+
+ Raises:
+ google.auth.exceptions.InvalidType: If the value is not of type string or None.
+ """
+ if not isinstance(value, str) and value is not None:
+ raise exceptions.InvalidType("The provided title is not a string or None.")
+ self._title = value
+
+ @property
+ def description(self):
+ """Returns the current description.
+
+ Returns:
+ Optional[str]: The current description.
+ """
+ return self._description
+
+ @description.setter
+ def description(self, value):
+ """Updates the current description.
+
+ Args:
+ value (Optional[str]): The updated value of the description.
+
+ Raises:
+ google.auth.exceptions.InvalidType: If the value is not of type string or None.
+ """
+ if not isinstance(value, str) and value is not None:
+ raise exceptions.InvalidType(
+ "The provided description is not a string or None."
+ )
+ self._description = value
+
+ def to_json(self):
+ """Generates the dictionary representation of the availability condition.
+ This uses the format expected by the Security Token Service API as documented in
+ `Defining a Credential Access Boundary`_.
+
+ .. _Defining a Credential Access Boundary:
+ https://cloud.google.com/iam/docs/downscoping-short-lived-credentials#define-boundary
+
+ Returns:
+ Mapping[str, str]: The availability condition represented in a dictionary
+ object.
+ """
+ json = {"expression": self.expression}
+ if self.title:
+ json["title"] = self.title
+ if self.description:
+ json["description"] = self.description
+ return json
+
+
+class Credentials(credentials.CredentialsWithQuotaProject):
+ """Defines a set of Google credentials that are downscoped from an existing set
+ of Google OAuth2 credentials. This is useful to restrict the Identity and Access
+    Management (IAM) permissions that a short-lived credential can use.
+
+    The common pattern of usage is to have a token broker with elevated access
+ generate these downscoped credentials from higher access source credentials and
+ pass the downscoped short-lived access tokens to a token consumer via some
+ secure authenticated channel for limited access to Google Cloud Storage
+ resources.
+ """
+
+ def __init__(
+ self, source_credentials, credential_access_boundary, quota_project_id=None
+ ):
+ """Instantiates a downscoped credentials object using the provided source
+ credentials and credential access boundary rules.
+ To downscope permissions of a source credential, a Credential Access Boundary
+ that specifies which resources the new credential can access, as well as an
+ upper bound on the permissions that are available on each resource, has to be
+ defined. A downscoped credential can then be instantiated using the source
+ credential and the Credential Access Boundary.
+
+ Args:
+ source_credentials (google.auth.credentials.Credentials): The source credentials
+ to be downscoped based on the provided Credential Access Boundary rules.
+ credential_access_boundary (google.auth.downscoped.CredentialAccessBoundary):
+ The Credential Access Boundary which contains a list of access boundary
+ rules. Each rule contains information on the resource that the rule applies to,
+ the upper bound of the permissions that are available on that resource and an
+ optional condition to further restrict permissions.
+ quota_project_id (Optional[str]): The optional quota project ID.
+ Raises:
+ google.auth.exceptions.RefreshError: If the source credentials
+ return an error on token refresh.
+ google.auth.exceptions.OAuthError: If the STS token exchange
+ endpoint returned an error during downscoped token generation.
+ """
+
+ super(Credentials, self).__init__()
+ self._source_credentials = source_credentials
+ self._credential_access_boundary = credential_access_boundary
+ self._quota_project_id = quota_project_id
+ self._sts_client = sts.Client(_STS_TOKEN_URL)
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ # Generate an access token from the source credentials.
+ self._source_credentials.refresh(request)
+ now = _helpers.utcnow()
+ # Exchange the access token for a downscoped access token.
+ response_data = self._sts_client.exchange_token(
+ request=request,
+ grant_type=_STS_GRANT_TYPE,
+ subject_token=self._source_credentials.token,
+ subject_token_type=_STS_SUBJECT_TOKEN_TYPE,
+ requested_token_type=_STS_REQUESTED_TOKEN_TYPE,
+ additional_options=self._credential_access_boundary.to_json(),
+ )
+ self.token = response_data.get("access_token")
+ # For downscoping CAB flow, the STS endpoint may not return the expiration
+ # field for some flows. The generated downscoped token should always have
+ # the same expiration time as the source credentials. When no expires_in
+ # field is returned in the response, we can just get the expiration time
+ # from the source credentials.
+ if response_data.get("expires_in"):
+ lifetime = datetime.timedelta(seconds=response_data.get("expires_in"))
+ self.expiry = now + lifetime
+ else:
+ self.expiry = self._source_credentials.expiry
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(
+ self._source_credentials,
+ self._credential_access_boundary,
+ quota_project_id=quota_project_id,
+ )
diff --git a/Lib/site-packages/google/auth/environment_vars.py b/Lib/site-packages/google/auth/environment_vars.py
new file mode 100644
index 0000000..81f3157
--- /dev/null
+++ b/Lib/site-packages/google/auth/environment_vars.py
@@ -0,0 +1,84 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Environment variables used by :mod:`google.auth`."""
+
+
+PROJECT = "GOOGLE_CLOUD_PROJECT"
+"""Environment variable defining default project.
+
+This is used by :func:`google.auth.default` to explicitly set a project ID. This
+environment variable is also used by the Google Cloud Python Library.
+"""
+
+LEGACY_PROJECT = "GCLOUD_PROJECT"
+"""Previously used environment variable defining the default project.
+
+This environment variable is used instead of the current one in some
+situations (such as Google App Engine).
+"""
+
+GOOGLE_CLOUD_QUOTA_PROJECT = "GOOGLE_CLOUD_QUOTA_PROJECT"
+"""Environment variable defining the project to be used for
+quota and billing."""
+
+CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS"
+"""Environment variable defining the location of Google application default
+credentials."""
+
+# The environment variable name which can replace ~/.config if set.
+CLOUD_SDK_CONFIG_DIR = "CLOUDSDK_CONFIG"
+"""Environment variable defines the location of Google Cloud SDK's config
+files."""
+
+# These two variables allow for customization of the addresses used when
+# contacting the GCE metadata service.
+GCE_METADATA_HOST = "GCE_METADATA_HOST"
+"""Environment variable providing an alternate hostname or host:port to be
+used for GCE metadata requests.
+
+This environment variable was originally named GCE_METADATA_ROOT. The system will
+check this environment variable first; should there be no value present,
+the system will fall back to the old variable.
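+
+Resolution therefore behaves like the following sketch (illustrative)::
+
+    import os
+
+    host = (os.environ.get("GCE_METADATA_HOST")
+            or os.environ.get("GCE_METADATA_ROOT"))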
+"""
+
+GCE_METADATA_ROOT = "GCE_METADATA_ROOT"
+"""Old environment variable for GCE_METADATA_HOST."""
+
+GCE_METADATA_IP = "GCE_METADATA_IP"
+"""Environment variable providing an alternate ip:port to be used for ip-only
+GCE metadata requests."""
+
+GOOGLE_API_USE_CLIENT_CERTIFICATE = "GOOGLE_API_USE_CLIENT_CERTIFICATE"
+"""Environment variable controlling whether to use client certificate or not.
+
+The default value is false. Users have to explicitly set this value to true
+in order to use client certificate to establish a mutual TLS channel."""
+
+LEGACY_APPENGINE_RUNTIME = "APPENGINE_RUNTIME"
+"""Gen1 environment variable defining the App Engine Runtime.
+
+Used to distinguish between GAE gen1 and GAE gen2+.
+"""
+
+# AWS environment variables used with AWS workload identity pools to retrieve
+# AWS security credentials and the AWS region needed to create serialized,
+# signed requests to the AWS STS GetCallerIdentity API that can be exchanged
+# for Google access tokens via the GCP STS endpoint.
+# When not available the AWS metadata server is used to retrieve these values.
+AWS_ACCESS_KEY_ID = "AWS_ACCESS_KEY_ID"
+AWS_SECRET_ACCESS_KEY = "AWS_SECRET_ACCESS_KEY"
+AWS_SESSION_TOKEN = "AWS_SESSION_TOKEN"
+AWS_REGION = "AWS_REGION"
+AWS_DEFAULT_REGION = "AWS_DEFAULT_REGION"
diff --git a/Lib/site-packages/google/auth/exceptions.py b/Lib/site-packages/google/auth/exceptions.py
new file mode 100644
index 0000000..fcbe61b
--- /dev/null
+++ b/Lib/site-packages/google/auth/exceptions.py
@@ -0,0 +1,100 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Exceptions used in the google.auth package."""
+
+
+class GoogleAuthError(Exception):
+ """Base class for all google.auth errors."""
+
+ def __init__(self, *args, **kwargs):
+ super(GoogleAuthError, self).__init__(*args)
+ retryable = kwargs.get("retryable", False)
+ self._retryable = retryable
+
+ @property
+ def retryable(self):
+ return self._retryable
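+
+    # Example (illustrative): callers can branch on ``retryable`` when
+    # deciding whether an operation is worth retrying::
+    #
+    #     try:
+    #         credentials.refresh(request)
+    #     except GoogleAuthError as exc:
+    #         if exc.retryable:
+    #             ...  # safe to retry the refresh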
+
+
+class TransportError(GoogleAuthError):
+ """Used to indicate an error occurred during an HTTP request."""
+
+
+class RefreshError(GoogleAuthError):
+ """Used to indicate that an refreshing the credentials' access token
+ failed."""
+
+
+class UserAccessTokenError(GoogleAuthError):
+ """Used to indicate ``gcloud auth print-access-token`` command failed."""
+
+
+class DefaultCredentialsError(GoogleAuthError):
+ """Used to indicate that acquiring default credentials failed."""
+
+
+class MutualTLSChannelError(GoogleAuthError):
+ """Used to indicate that mutual TLS channel creation is failed, or mutual
+ TLS channel credentials is missing or invalid."""
+
+
+class ClientCertError(GoogleAuthError):
+ """Used to indicate that client certificate is missing or invalid."""
+
+ @property
+ def retryable(self):
+ return False
+
+
+class OAuthError(GoogleAuthError):
+ """Used to indicate an error occurred during an OAuth related HTTP
+ request."""
+
+
+class ReauthFailError(RefreshError):
+ """An exception for when reauth failed."""
+
+ def __init__(self, message=None, **kwargs):
+ super(ReauthFailError, self).__init__(
+ "Reauthentication failed. {0}".format(message), **kwargs
+ )
+
+
+class ReauthSamlChallengeFailError(ReauthFailError):
+ """An exception for SAML reauth challenge failures."""
+
+
+class MalformedError(DefaultCredentialsError, ValueError):
+ """An exception for malformed data."""
+
+
+class InvalidResource(DefaultCredentialsError, ValueError):
+ """An exception for URL error."""
+
+
+class InvalidOperation(DefaultCredentialsError, ValueError):
+ """An exception for invalid operation."""
+
+
+class InvalidValue(DefaultCredentialsError, ValueError):
+ """Used to wrap general ValueError of python."""
+
+
+class InvalidType(DefaultCredentialsError, TypeError):
+ """Used to wrap general TypeError of python."""
+
+
+class OSError(DefaultCredentialsError, EnvironmentError):
+ """Used to wrap EnvironmentError(OSError after python3.3)."""
diff --git a/Lib/site-packages/google/auth/external_account.py b/Lib/site-packages/google/auth/external_account.py
new file mode 100644
index 0000000..c314ea7
--- /dev/null
+++ b/Lib/site-packages/google/auth/external_account.py
@@ -0,0 +1,534 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""External Account Credentials.
+
+This module provides credentials that exchange workload identity pool external
+credentials for Google access tokens. This facilitates accessing Google Cloud
+Platform resources from on-prem and non-Google Cloud platforms (e.g. AWS,
+Microsoft Azure, OIDC identity providers), using native credentials retrieved
+from the current environment without the need to copy, save and manage
+long-lived service account credentials.
+
+Specifically, this is intended to use access tokens acquired using the GCP STS
+token exchange endpoint following the `OAuth 2.0 Token Exchange`_ spec.
+
+.. _OAuth 2.0 Token Exchange: https://tools.ietf.org/html/rfc8693
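+
+A minimal usage sketch (the scopes are illustrative, and
+GOOGLE_APPLICATION_CREDENTIALS is assumed to point at an external account
+JSON configuration file)::
+
+    import google.auth
+    from google.auth.transport import requests as google_requests
+
+    credentials, project_id = google.auth.default(
+        scopes=['https://www.googleapis.com/auth/cloud-platform'])
+    # Refreshing retrieves the external subject token and exchanges it for a
+    # Google access token at the STS endpoint.
+    credentials.refresh(google_requests.Request())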
+"""
+
+import abc
+import copy
+import datetime
+import io
+import json
+import re
+
+from google.auth import _helpers
+from google.auth import credentials
+from google.auth import exceptions
+from google.auth import impersonated_credentials
+from google.auth import metrics
+from google.oauth2 import sts
+from google.oauth2 import utils
+
+# External account JSON type identifier.
+_EXTERNAL_ACCOUNT_JSON_TYPE = "external_account"
+# The token exchange grant_type used for exchanging credentials.
+_STS_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:token-exchange"
+# The token exchange requested_token_type. This is always an access_token.
+_STS_REQUESTED_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:access_token"
+# Cloud resource manager URL used to retrieve project information.
+_CLOUD_RESOURCE_MANAGER = "https://cloudresourcemanager.googleapis.com/v1/projects/"
+
+_DEFAULT_UNIVERSE_DOMAIN = "googleapis.com"
+
+
+class Credentials(
+ credentials.Scoped,
+ credentials.CredentialsWithQuotaProject,
+ credentials.CredentialsWithTokenUri,
+ metaclass=abc.ABCMeta,
+):
+ """Base class for all external account credentials.
+
+ This is used to instantiate Credentials for exchanging external account
+ credentials for Google access tokens and authorizing requests to Google APIs.
+ The base class implements the common logic for exchanging external account
+ credentials for Google access tokens.
+ """
+
+ def __init__(
+ self,
+ audience,
+ subject_token_type,
+ token_url,
+ credential_source,
+ service_account_impersonation_url=None,
+ service_account_impersonation_options=None,
+ client_id=None,
+ client_secret=None,
+ token_info_url=None,
+ quota_project_id=None,
+ scopes=None,
+ default_scopes=None,
+ workforce_pool_user_project=None,
+ universe_domain=_DEFAULT_UNIVERSE_DOMAIN,
+ trust_boundary=None,
+ ):
+ """Instantiates an external account credentials object.
+
+ Args:
+ audience (str): The STS audience field.
+ subject_token_type (str): The subject token type.
+ token_url (str): The STS endpoint URL.
+ credential_source (Mapping): The credential source dictionary.
+ service_account_impersonation_url (Optional[str]): The optional service account
+ impersonation generateAccessToken URL.
+ client_id (Optional[str]): The optional client ID.
+ client_secret (Optional[str]): The optional client secret.
+ token_info_url (str): The optional STS endpoint URL for token introspection.
+ quota_project_id (Optional[str]): The optional quota project ID.
+ scopes (Optional[Sequence[str]]): Optional scopes to request during the
+ authorization grant.
+ default_scopes (Optional[Sequence[str]]): Default scopes passed by a
+ Google client library. Use 'scopes' for user-defined scopes.
+ workforce_pool_user_project (Optional[str]): The optional workforce pool user
+ project number when the credential corresponds to a workforce pool and not
+ a workload identity pool. The underlying principal must still have
+ serviceusage.services.use IAM permission to use the project for
+ billing/quota.
+ universe_domain (str): The universe domain. The default universe
+ domain is googleapis.com.
+ trust_boundary (str): String representation of trust boundary metadata.
+ Raises:
+ google.auth.exceptions.RefreshError: If the generateAccessToken
+ endpoint returned an error.
+ """
+ super(Credentials, self).__init__()
+ self._audience = audience
+ self._subject_token_type = subject_token_type
+ self._token_url = token_url
+ self._token_info_url = token_info_url
+ self._credential_source = credential_source
+ self._service_account_impersonation_url = service_account_impersonation_url
+ self._service_account_impersonation_options = (
+ service_account_impersonation_options or {}
+ )
+ self._client_id = client_id
+ self._client_secret = client_secret
+ self._quota_project_id = quota_project_id
+ self._scopes = scopes
+ self._default_scopes = default_scopes
+ self._workforce_pool_user_project = workforce_pool_user_project
+ self._universe_domain = universe_domain or _DEFAULT_UNIVERSE_DOMAIN
+ self._trust_boundary = {
+ "locations": [],
+ "encoded_locations": "0x0",
+ } # expose a placeholder trust boundary value.
+
+ if self._client_id:
+ self._client_auth = utils.ClientAuthentication(
+ utils.ClientAuthType.basic, self._client_id, self._client_secret
+ )
+ else:
+ self._client_auth = None
+ self._sts_client = sts.Client(self._token_url, self._client_auth)
+
+ self._metrics_options = self._create_default_metrics_options()
+
+ if self._service_account_impersonation_url:
+ self._impersonated_credentials = self._initialize_impersonated_credentials()
+ else:
+ self._impersonated_credentials = None
+ self._project_id = None
+
+ if not self.is_workforce_pool and self._workforce_pool_user_project:
+ # Workload identity pools do not support workforce pool user projects.
+ raise exceptions.InvalidValue(
+ "workforce_pool_user_project should not be set for non-workforce pool "
+ "credentials"
+ )
+
+ @property
+ def info(self):
+ """Generates the dictionary representation of the current credentials.
+
+ Returns:
+ Mapping: The dictionary representation of the credentials. This is the
+ reverse of "from_info" defined on the subclasses of this class. It is
+ useful for serializing the current credentials so it can deserialized
+ later.
+ """
+ config_info = self._constructor_args()
+ config_info.update(
+ type=_EXTERNAL_ACCOUNT_JSON_TYPE,
+ service_account_impersonation=config_info.pop(
+ "service_account_impersonation_options", None
+ ),
+ )
+ config_info.pop("scopes", None)
+ config_info.pop("default_scopes", None)
+ return {key: value for key, value in config_info.items() if value is not None}
+
+ def _constructor_args(self):
+ args = {
+ "audience": self._audience,
+ "subject_token_type": self._subject_token_type,
+ "token_url": self._token_url,
+ "token_info_url": self._token_info_url,
+ "service_account_impersonation_url": self._service_account_impersonation_url,
+ "service_account_impersonation_options": copy.deepcopy(
+ self._service_account_impersonation_options
+ )
+ or None,
+ "credential_source": copy.deepcopy(self._credential_source),
+ "quota_project_id": self._quota_project_id,
+ "client_id": self._client_id,
+ "client_secret": self._client_secret,
+ "workforce_pool_user_project": self._workforce_pool_user_project,
+ "scopes": self._scopes,
+ "default_scopes": self._default_scopes,
+ "universe_domain": self._universe_domain,
+ }
+ if not self.is_workforce_pool:
+ args.pop("workforce_pool_user_project")
+ return args
+
+ @property
+ def service_account_email(self):
+ """Returns the service account email if service account impersonation is used.
+
+ Returns:
+ Optional[str]: The service account email if impersonation is used. Otherwise
+ None is returned.
+ """
+ if self._service_account_impersonation_url:
+ # Parse the email from the URL. The format looks as follows:
+ # https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken
+ url = self._service_account_impersonation_url
+ start_index = url.rfind("/")
+ end_index = url.find(":generateAccessToken")
+ if start_index != -1 and end_index != -1 and start_index < end_index:
+ start_index = start_index + 1
+ return url[start_index:end_index]
+ return None
+
+ @property
+ def is_user(self):
+ """Returns whether the credentials represent a user (True) or workload (False).
+ Workloads behave similarly to service accounts. Currently workloads will use
+ service account impersonation but will eventually not require impersonation.
+ As a result, this property is more reliable than the service account email
+ property in determining if the credentials represent a user or workload.
+
+ Returns:
+ bool: True if the credentials represent a user. False if they represent a
+ workload.
+ """
+ # If service account impersonation is used, the credentials will always represent a
+ # service account.
+ if self._service_account_impersonation_url:
+ return False
+ return self.is_workforce_pool
+
+ @property
+ def is_workforce_pool(self):
+ """Returns whether the credentials represent a workforce pool (True) or
+ workload (False) based on the credentials' audience.
+
+ This will also return True for impersonated workforce pool credentials.
+
+ Returns:
+ bool: True if the credentials represent a workforce pool. False if they
+ represent a workload.
+ """
+ # Workforce pools representing users have the following audience format:
+ # //iam.googleapis.com/locations/$location/workforcePools/$poolId/providers/$providerId
+ p = re.compile(r"//iam\.googleapis\.com/locations/[^/]+/workforcePools/")
+ return p.match(self._audience or "") is not None
+
+ @property
+ def requires_scopes(self):
+ """Checks if the credentials requires scopes.
+
+ Returns:
+ bool: True if there are no scopes set otherwise False.
+ """
+ return not self._scopes and not self._default_scopes
+
+ @property
+ def project_number(self):
+ """Optional[str]: The project number corresponding to the workload identity pool."""
+
+ # STS audience pattern:
+ # //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/...
+ components = self._audience.split("/")
+ try:
+ project_index = components.index("projects")
+ if project_index + 1 < len(components):
+ return components[project_index + 1] or None
+ except ValueError:
+ return None
+
+ @property
+ def token_info_url(self):
+ """Optional[str]: The STS token introspection endpoint."""
+
+ return self._token_info_url
+
+ @_helpers.copy_docstring(credentials.Scoped)
+ def with_scopes(self, scopes, default_scopes=None):
+ kwargs = self._constructor_args()
+ kwargs.update(scopes=scopes, default_scopes=default_scopes)
+ scoped = self.__class__(**kwargs)
+ scoped._metrics_options = self._metrics_options
+ return scoped
+
+ @abc.abstractmethod
+ def retrieve_subject_token(self, request):
+ """Retrieves the subject token using the credential_source object.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ Returns:
+ str: The retrieved subject token.
+ """
+ # pylint: disable=missing-raises-doc
+ # (pylint doesn't recognize that this is abstract)
+ raise NotImplementedError("retrieve_subject_token must be implemented")
+
+ def get_project_id(self, request):
+ """Retrieves the project ID corresponding to the workload identity or workforce pool.
+ For workforce pool credentials, it returns the project ID corresponding to
+ the workforce_pool_user_project.
+
+ When not determinable, None is returned.
+
+ This is introduced to support the current pattern of using the Auth library:
+
+ credentials, project_id = google.auth.default()
+
+ The resource may not have permission (resourcemanager.projects.get) to
+ call this API or the required scopes may not be selected:
+ https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ Returns:
+ Optional[str]: The project ID corresponding to the workload identity pool
+ or workforce pool if determinable.
+ """
+ if self._project_id:
+ # If already retrieved, return the cached project ID value.
+ return self._project_id
+ scopes = self._scopes if self._scopes is not None else self._default_scopes
+ # Scopes are required in order to retrieve a valid access token.
+ project_number = self.project_number or self._workforce_pool_user_project
+ if project_number and scopes:
+ headers = {}
+ url = _CLOUD_RESOURCE_MANAGER + project_number
+ self.before_request(request, "GET", url, headers)
+ response = request(url=url, method="GET", headers=headers)
+
+ response_body = (
+ response.data.decode("utf-8")
+ if hasattr(response.data, "decode")
+ else response.data
+ )
+ response_data = json.loads(response_body)
+
+ if response.status == 200:
+ # Cache result as this field is immutable.
+ self._project_id = response_data.get("projectId")
+ return self._project_id
+
+ return None
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ scopes = self._scopes if self._scopes is not None else self._default_scopes
+ if self._impersonated_credentials:
+ self._impersonated_credentials.refresh(request)
+ self.token = self._impersonated_credentials.token
+ self.expiry = self._impersonated_credentials.expiry
+ else:
+ now = _helpers.utcnow()
+ additional_options = None
+ # Do not pass workforce_pool_user_project when client authentication
+ # is used. The client ID is sufficient for determining the user project.
+ if self._workforce_pool_user_project and not self._client_id:
+ additional_options = {"userProject": self._workforce_pool_user_project}
+ additional_headers = {
+ metrics.API_CLIENT_HEADER: metrics.byoid_metrics_header(
+ self._metrics_options
+ )
+ }
+ response_data = self._sts_client.exchange_token(
+ request=request,
+ grant_type=_STS_GRANT_TYPE,
+ subject_token=self.retrieve_subject_token(request),
+ subject_token_type=self._subject_token_type,
+ audience=self._audience,
+ scopes=scopes,
+ requested_token_type=_STS_REQUESTED_TOKEN_TYPE,
+ additional_options=additional_options,
+ additional_headers=additional_headers,
+ )
+ self.token = response_data.get("access_token")
+ expires_in = response_data.get("expires_in")
+ # Some services do not respect the OAuth 2.0 RFC and send expires_in as a
+ # JSON string.
+ if isinstance(expires_in, str):
+ expires_in = int(expires_in)
+
+ lifetime = datetime.timedelta(seconds=expires_in)
+
+ self.expiry = now + lifetime
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ # Return copy of instance with the provided quota project ID.
+ kwargs = self._constructor_args()
+ kwargs.update(quota_project_id=quota_project_id)
+ new_cred = self.__class__(**kwargs)
+ new_cred._metrics_options = self._metrics_options
+ return new_cred
+
+ @_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
+ def with_token_uri(self, token_uri):
+ kwargs = self._constructor_args()
+ kwargs.update(token_url=token_uri)
+ new_cred = self.__class__(**kwargs)
+ new_cred._metrics_options = self._metrics_options
+ return new_cred
+
+ @_helpers.copy_docstring(credentials.CredentialsWithUniverseDomain)
+ def with_universe_domain(self, universe_domain):
+ kwargs = self._constructor_args()
+ kwargs.update(universe_domain=universe_domain)
+ new_cred = self.__class__(**kwargs)
+ new_cred._metrics_options = self._metrics_options
+ return new_cred
+
+ def _initialize_impersonated_credentials(self):
+ """Generates an impersonated credentials.
+
+ For more details, see `projects.serviceAccounts.generateAccessToken`_.
+
+ .. _projects.serviceAccounts.generateAccessToken: https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateAccessToken
+
+ Returns:
+ impersonated_credentials.Credentials: The impersonated credentials
+ object.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the generateAccessToken
+ endpoint returned an error.
+ """
+ # Return copy of instance with no service account impersonation.
+ kwargs = self._constructor_args()
+ kwargs.update(
+ service_account_impersonation_url=None,
+ service_account_impersonation_options={},
+ )
+ source_credentials = self.__class__(**kwargs)
+ source_credentials._metrics_options = self._metrics_options
+
+ # Determine target_principal.
+ target_principal = self.service_account_email
+ if not target_principal:
+ raise exceptions.RefreshError(
+ "Unable to determine target principal from service account impersonation URL."
+ )
+
+ scopes = self._scopes if self._scopes is not None else self._default_scopes
+ # Initialize and return impersonated credentials.
+ return impersonated_credentials.Credentials(
+ source_credentials=source_credentials,
+ target_principal=target_principal,
+ target_scopes=scopes,
+ quota_project_id=self._quota_project_id,
+ iam_endpoint_override=self._service_account_impersonation_url,
+ lifetime=self._service_account_impersonation_options.get(
+ "token_lifetime_seconds"
+ ),
+ )
+
+ def _create_default_metrics_options(self):
+ metrics_options = {}
+ if self._service_account_impersonation_url:
+ metrics_options["sa-impersonation"] = "true"
+ else:
+ metrics_options["sa-impersonation"] = "false"
+ if self._service_account_impersonation_options.get("token_lifetime_seconds"):
+ metrics_options["config-lifetime"] = "true"
+ else:
+ metrics_options["config-lifetime"] = "false"
+
+ return metrics_options
+
+ @classmethod
+ def from_info(cls, info, **kwargs):
+ """Creates a Credentials instance from parsed external account info.
+
+ Args:
+ info (Mapping[str, str]): The external account info in Google
+ format.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.identity_pool.Credentials: The constructed
+ credentials.
+
+ Raises:
+ InvalidValue: For invalid parameters.
+ """
+ return cls(
+ audience=info.get("audience"),
+ subject_token_type=info.get("subject_token_type"),
+ token_url=info.get("token_url"),
+ token_info_url=info.get("token_info_url"),
+ service_account_impersonation_url=info.get(
+ "service_account_impersonation_url"
+ ),
+ service_account_impersonation_options=info.get(
+ "service_account_impersonation"
+ )
+ or {},
+ client_id=info.get("client_id"),
+ client_secret=info.get("client_secret"),
+ credential_source=info.get("credential_source"),
+ quota_project_id=info.get("quota_project_id"),
+ workforce_pool_user_project=info.get("workforce_pool_user_project"),
+ universe_domain=info.get("universe_domain", _DEFAULT_UNIVERSE_DOMAIN),
+ **kwargs
+ )
+
+ @classmethod
+ def from_file(cls, filename, **kwargs):
+ """Creates a Credentials instance from an external account json file.
+
+ Args:
+ filename (str): The path to the external account json file.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.identity_pool.Credentials: The constructed
+ credentials.
+ """
+ with io.open(filename, "r", encoding="utf-8") as json_file:
+ data = json.load(json_file)
+ return cls.from_info(data, **kwargs)
diff --git a/Lib/site-packages/google/auth/external_account_authorized_user.py b/Lib/site-packages/google/auth/external_account_authorized_user.py
new file mode 100644
index 0000000..526588f
--- /dev/null
+++ b/Lib/site-packages/google/auth/external_account_authorized_user.py
@@ -0,0 +1,363 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""External Account Authorized User Credentials.
+This module provides credentials based on OAuth 2.0 access and refresh tokens.
+These credentials usually access resources on behalf of a user (resource
+owner).
+
+Specifically, these are sourced using external identities via Workforce Identity Federation.
+
+Obtaining the initial access and refresh token can be done through the Google Cloud CLI.
+
+Example credential:
+{
+ "type": "external_account_authorized_user",
+ "audience": "//iam.googleapis.com/locations/global/workforcePools/$WORKFORCE_POOL_ID/providers/$PROVIDER_ID",
+ "refresh_token": "refreshToken",
+ "token_url": "https://sts.googleapis.com/v1/oauth/token",
+ "token_info_url": "https://sts.googleapis.com/v1/instrospect",
+ "client_id": "clientId",
+ "client_secret": "clientSecret"
+}
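+
+A minimal usage sketch (the file path is illustrative)::
+
+    from google.auth import external_account_authorized_user
+    from google.auth.transport import requests as google_requests
+
+    credentials = external_account_authorized_user.Credentials.from_file(
+        '/path/to/external_account_authorized_user.json')
+    credentials.refresh(google_requests.Request())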
+"""
+
+import datetime
+import io
+import json
+
+from google.auth import _helpers
+from google.auth import credentials
+from google.auth import exceptions
+from google.oauth2 import sts
+from google.oauth2 import utils
+
+_DEFAULT_UNIVERSE_DOMAIN = "googleapis.com"
+_EXTERNAL_ACCOUNT_AUTHORIZED_USER_JSON_TYPE = "external_account_authorized_user"
+
+
+class Credentials(
+ credentials.CredentialsWithQuotaProject,
+ credentials.ReadOnlyScoped,
+ credentials.CredentialsWithTokenUri,
+):
+ """Credentials for External Account Authorized Users.
+
+ This is used to instantiate Credentials for exchanging refresh tokens from
+ authorized users for Google access tokens and authorizing requests to Google
+ APIs.
+
+ The credentials are considered immutable. If you want to modify the
+ quota project, use `with_quota_project` and if you want to modify the token
+ uri, use `with_token_uri`.
+ """
+
+ def __init__(
+ self,
+ token=None,
+ expiry=None,
+ refresh_token=None,
+ audience=None,
+ client_id=None,
+ client_secret=None,
+ token_url=None,
+ token_info_url=None,
+ revoke_url=None,
+ scopes=None,
+ quota_project_id=None,
+ universe_domain=_DEFAULT_UNIVERSE_DOMAIN,
+ ):
+ """Instantiates a external account authorized user credentials object.
+
+ Args:
+ token (str): The OAuth 2.0 access token. Can be None if refresh information
+ is provided.
+ expiry (datetime.datetime): The optional expiration datetime of the OAuth 2.0 access
+ token.
+ refresh_token (str): The optional OAuth 2.0 refresh token. If specified,
+ credentials can be refreshed.
+ audience (str): The optional STS audience which contains the resource name for the workforce
+ pool and the provider identifier in that pool.
+ client_id (str): The OAuth 2.0 client ID. Must be specified for refresh; can be left as
+ None if the token cannot be refreshed.
+ client_secret (str): The OAuth 2.0 client secret. Must be specified for refresh; can be
+ left as None if the token cannot be refreshed.
+ token_url (str): The optional STS token exchange endpoint for refresh. Must be specified
+ for refresh; can be left as None if the token cannot be refreshed.
+ token_info_url (str): The optional STS endpoint URL for token introspection.
+ revoke_url (str): The optional STS endpoint URL for revoking tokens.
+ quota_project_id (str): The optional project ID used for quota and billing.
+ This project may be different from the project used to
+ create the credentials.
+ universe_domain (Optional[str]): The universe domain. The default value
+ is googleapis.com.
+
+ Returns:
+ google.auth.external_account_authorized_user.Credentials: The
+ constructed credentials.
+ """
+ super(Credentials, self).__init__()
+
+ self.token = token
+ self.expiry = expiry
+ self._audience = audience
+ self._refresh_token = refresh_token
+ self._token_url = token_url
+ self._token_info_url = token_info_url
+ self._client_id = client_id
+ self._client_secret = client_secret
+ self._revoke_url = revoke_url
+ self._quota_project_id = quota_project_id
+ self._scopes = scopes
+ self._universe_domain = universe_domain or _DEFAULT_UNIVERSE_DOMAIN
+
+ if not self.valid and not self.can_refresh:
+ raise exceptions.InvalidOperation(
+ "Token should be created with fields to make it valid (`token` and "
+ "`expiry`), or fields to allow it to refresh (`refresh_token`, "
+ "`token_url`, `client_id`, `client_secret`)."
+ )
+
+ self._client_auth = None
+ if self._client_id:
+ self._client_auth = utils.ClientAuthentication(
+ utils.ClientAuthType.basic, self._client_id, self._client_secret
+ )
+ self._sts_client = sts.Client(self._token_url, self._client_auth)
+
+ @property
+ def info(self):
+ """Generates the serializable dictionary representation of the current
+ credentials.
+
+ Returns:
+ Mapping: The dictionary representation of the credentials. This is the
+ reverse of the "from_info" method defined in this class. It is
+ useful for serializing the current credentials so they can be
+ deserialized later.
+ """
+ config_info = self.constructor_args()
+ config_info.update(type=_EXTERNAL_ACCOUNT_AUTHORIZED_USER_JSON_TYPE)
+ if config_info["expiry"]:
+ config_info["expiry"] = config_info["expiry"].isoformat() + "Z"
+
+ return {key: value for key, value in config_info.items() if value is not None}
+
+ def constructor_args(self):
+ return {
+ "audience": self._audience,
+ "refresh_token": self._refresh_token,
+ "token_url": self._token_url,
+ "token_info_url": self._token_info_url,
+ "client_id": self._client_id,
+ "client_secret": self._client_secret,
+ "token": self.token,
+ "expiry": self.expiry,
+ "revoke_url": self._revoke_url,
+ "scopes": self._scopes,
+ "quota_project_id": self._quota_project_id,
+ "universe_domain": self._universe_domain,
+ }
+
+ @property
+ def scopes(self):
+ """Optional[str]: The OAuth 2.0 permission scopes."""
+ return self._scopes
+
+ @property
+ def requires_scopes(self):
+ """ False: OAuth 2.0 credentials have their scopes set when
+ the initial token is requested and can not be changed."""
+ return False
+
+ @property
+ def client_id(self):
+ """Optional[str]: The OAuth 2.0 client ID."""
+ return self._client_id
+
+ @property
+ def client_secret(self):
+ """Optional[str]: The OAuth 2.0 client secret."""
+ return self._client_secret
+
+ @property
+ def audience(self):
+ """Optional[str]: The STS audience which contains the resource name for the
+ workforce pool and the provider identifier in that pool."""
+ return self._audience
+
+ @property
+ def refresh_token(self):
+ """Optional[str]: The OAuth 2.0 refresh token."""
+ return self._refresh_token
+
+ @property
+ def token_url(self):
+ """Optional[str]: The STS token exchange endpoint for refresh."""
+ return self._token_url
+
+ @property
+ def token_info_url(self):
+ """Optional[str]: The STS endpoint for token info."""
+ return self._token_info_url
+
+ @property
+ def revoke_url(self):
+ """Optional[str]: The STS endpoint for token revocation."""
+ return self._revoke_url
+
+ @property
+ def is_user(self):
+ """ True: This credential always represents a user."""
+ return True
+
+ @property
+ def can_refresh(self):
+ return all(
+ (self._refresh_token, self._token_url, self._client_id, self._client_secret)
+ )
+
+ def get_project_id(self, request=None):
+ """Retrieves the project ID corresponding to the workload identity or workforce pool.
+ For workforce pool credentials, it returns the project ID corresponding to
+ the workforce_pool_user_project.
+
+ When not determinable, None is returned.
+
+ Args:
+ request (google.auth.transport.requests.Request): Request object.
+ Unused here, but passed from _default.default().
+
+ Returns:
+ None: The project ID is not determinable for this credential type,
+ so None is always returned.
+ """
+
+ return None
+
+ def to_json(self, strip=None):
+ """Utility function that creates a JSON representation of this
+ credential.
+ Args:
+ strip (Sequence[str]): Optional list of members to exclude from the
+ generated JSON.
+ Returns:
+ str: A JSON representation of this instance. When converted into
+ a dictionary, it can be passed to from_info()
+ to create a new instance.
+ """
+ strip = strip if strip else []
+ return json.dumps({k: v for (k, v) in self.info.items() if k not in strip})
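+
+ # A round-trip sketch (illustrative): the JSON produced here can be parsed
+ # and fed back to from_info, provided the refresh fields were not stripped:
+ #
+ #     info = json.loads(creds.to_json())
+ #     restored = Credentials.from_info(info)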
+
+ def refresh(self, request):
+ """Refreshes the access token.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the credentials could
+ not be refreshed.
+ """
+ if not self.can_refresh:
+ raise exceptions.RefreshError(
+ "The credentials do not contain the necessary fields need to "
+ "refresh the access token. You must specify refresh_token, "
+ "token_url, client_id, and client_secret."
+ )
+
+ now = _helpers.utcnow()
+ response_data = self._make_sts_request(request)
+
+ self.token = response_data.get("access_token")
+
+ lifetime = datetime.timedelta(seconds=response_data.get("expires_in"))
+ self.expiry = now + lifetime
+
+ if "refresh_token" in response_data:
+ self._refresh_token = response_data["refresh_token"]
+
+ def _make_sts_request(self, request):
+ return self._sts_client.refresh_token(request, self._refresh_token)
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ kwargs = self.constructor_args()
+ kwargs.update(quota_project_id=quota_project_id)
+ return self.__class__(**kwargs)
+
+ @_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
+ def with_token_uri(self, token_uri):
+ kwargs = self.constructor_args()
+ kwargs.update(token_url=token_uri)
+ return self.__class__(**kwargs)
+
+ @_helpers.copy_docstring(credentials.CredentialsWithUniverseDomain)
+ def with_universe_domain(self, universe_domain):
+ kwargs = self.constructor_args()
+ kwargs.update(universe_domain=universe_domain)
+ return self.__class__(**kwargs)
+
+ @classmethod
+ def from_info(cls, info, **kwargs):
+ """Creates a Credentials instance from parsed external account info.
+
+ Args:
+ info (Mapping[str, str]): The external account info in Google
+ format.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.external_account_authorized_user.Credentials: The
+ constructed credentials.
+
+ Raises:
+ ValueError: For invalid parameters.
+ """
+ expiry = info.get("expiry")
+ if expiry:
+ expiry = datetime.datetime.strptime(
+ expiry.rstrip("Z").split(".")[0], "%Y-%m-%dT%H:%M:%S"
+ )
+ return cls(
+ audience=info.get("audience"),
+ refresh_token=info.get("refresh_token"),
+ token_url=info.get("token_url"),
+ token_info_url=info.get("token_info_url"),
+ client_id=info.get("client_id"),
+ client_secret=info.get("client_secret"),
+ token=info.get("token"),
+ expiry=expiry,
+ revoke_url=info.get("revoke_url"),
+ quota_project_id=info.get("quota_project_id"),
+ scopes=info.get("scopes"),
+ universe_domain=info.get("universe_domain", _DEFAULT_UNIVERSE_DOMAIN),
+ **kwargs
+ )
+
+ @classmethod
+ def from_file(cls, filename, **kwargs):
+ """Creates a Credentials instance from an external account json file.
+
+ Args:
+ filename (str): The path to the external account json file.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.external_account_authorized_user.Credentials: The
+ constructed credentials.
+ """
+ with io.open(filename, "r", encoding="utf-8") as json_file:
+ data = json.load(json_file)
+ return cls.from_info(data, **kwargs)
diff --git a/Lib/site-packages/google/auth/iam.py b/Lib/site-packages/google/auth/iam.py
new file mode 100644
index 0000000..e9df844
--- /dev/null
+++ b/Lib/site-packages/google/auth/iam.py
@@ -0,0 +1,99 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tools for using the Google `Cloud Identity and Access Management (IAM)
+API`_'s auth-related functionality.
+
+.. _Cloud Identity and Access Management (IAM) API:
+ https://cloud.google.com/iam/docs/
+"""
+
+import base64
+import http.client as http_client
+import json
+
+from google.auth import _helpers
+from google.auth import crypt
+from google.auth import exceptions
+
+_IAM_API_ROOT_URI = "https://iamcredentials.googleapis.com/v1"
+_SIGN_BLOB_URI = _IAM_API_ROOT_URI + "/projects/-/serviceAccounts/{}:signBlob?alt=json"
+
+
+class Signer(crypt.Signer):
+ """Signs messages using the IAM `signBlob API`_.
+
+ This is useful when you need to sign bytes but do not have access to the
+ credential's private key file.
+
+ .. _signBlob API:
+ https://cloud.google.com/iam/reference/rest/v1/projects.serviceAccounts
+ /signBlob
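+
+ A usage sketch (the service account email is illustrative, and
+ ``credentials`` is assumed to already hold IAM-scoped credentials)::
+
+     from google.auth import iam
+     from google.auth.transport import requests as google_requests
+
+     signer = iam.Signer(
+         google_requests.Request(), credentials,
+         'my-service-account@my-project.iam.gserviceaccount.com')
+     signature = signer.sign(b'message to sign')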
+ """
+
+ def __init__(self, request, credentials, service_account_email):
+ """
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ credentials (google.auth.credentials.Credentials): The credentials
+ that will be used to authenticate the request to the IAM API.
+ The credentials must have one of the following scopes:
+
+ - https://www.googleapis.com/auth/iam
+ - https://www.googleapis.com/auth/cloud-platform
+ service_account_email (str): The service account email identifying
+ which service account to use to sign bytes. Often, this can
+ be the same as the service account email in the given
+ credentials.
+ """
+ self._request = request
+ self._credentials = credentials
+ self._service_account_email = service_account_email
+
+ def _make_signing_request(self, message):
+ """Makes a request to the API signBlob API."""
+ message = _helpers.to_bytes(message)
+
+ method = "POST"
+ url = _SIGN_BLOB_URI.format(self._service_account_email)
+ headers = {"Content-Type": "application/json"}
+ body = json.dumps(
+ {"payload": base64.b64encode(message).decode("utf-8")}
+ ).encode("utf-8")
+
+ self._credentials.before_request(self._request, method, url, headers)
+ response = self._request(url=url, method=method, body=body, headers=headers)
+
+ if response.status != http_client.OK:
+ raise exceptions.TransportError(
+ "Error calling the IAM signBlob API: {}".format(response.data)
+ )
+
+ return json.loads(response.data.decode("utf-8"))
+
+ @property
+ def key_id(self):
+ """Optional[str]: The key ID used to identify this private key.
+
+ .. warning::
+ This is always ``None``. The key ID used by IAM can not
+ be reliably determined ahead of time.
+ """
+ return None
+
+ @_helpers.copy_docstring(crypt.Signer)
+ def sign(self, message):
+ response = self._make_signing_request(message)
+ return base64.b64decode(response["signedBlob"])
diff --git a/Lib/site-packages/google/auth/identity_pool.py b/Lib/site-packages/google/auth/identity_pool.py
new file mode 100644
index 0000000..a515353
--- /dev/null
+++ b/Lib/site-packages/google/auth/identity_pool.py
@@ -0,0 +1,261 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Identity Pool Credentials.
+
+This module provides credentials to access Google Cloud resources from on-prem
+or non-Google Cloud platforms which support external credentials (e.g. OIDC ID
+tokens) retrieved from local file locations or local servers. This includes
+Microsoft Azure and OIDC identity providers (e.g. Kubernetes workloads
+registered with Hub, with Hub workload identity enabled).
+
+These credentials are recommended over the use of service account credentials
+in on-prem/non-Google Cloud platforms as they do not involve the management of
+long-lived service account private keys.
+
+Identity Pool Credentials are initialized using external_account
+arguments which are typically loaded from an external credentials file or
+an external credentials URL. Unlike other Credentials that can be initialized
+with a list of explicit arguments, secrets or credentials, external account
+clients use the environment and hints/guidelines provided by the
+external_account JSON file to retrieve credentials and exchange them for Google
+access tokens.
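+
+A minimal sketch (the file path and scopes are illustrative)::
+
+    from google.auth import identity_pool
+
+    credentials = identity_pool.Credentials.from_file(
+        '/path/to/external_account.json',
+        scopes=['https://www.googleapis.com/auth/cloud-platform'])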
+"""
+
+try:
+ from collections.abc import Mapping
+# Python 2.7 compatibility
+except ImportError: # pragma: NO COVER
+ from collections import Mapping
+import io
+import json
+import os
+
+from google.auth import _helpers
+from google.auth import exceptions
+from google.auth import external_account
+
+
+class Credentials(external_account.Credentials):
+ """External account credentials sourced from files and URLs."""
+
+ def __init__(
+ self,
+ audience,
+ subject_token_type,
+ token_url,
+ credential_source,
+ *args,
+ **kwargs
+ ):
+ """Instantiates an external account credentials object from a file/URL.
+
+ Args:
+ audience (str): The STS audience field.
+ subject_token_type (str): The subject token type.
+ token_url (str): The STS endpoint URL.
+ credential_source (Mapping): The credential source dictionary used to
+ provide instructions on how to retrieve external credential to be
+ exchanged for Google access tokens.
+
+ Example credential_source for url-sourced credential::
+
+ {
+ "url": "http://www.example.com",
+ "format": {
+ "type": "json",
+ "subject_token_field_name": "access_token",
+ },
+ "headers": {"foo": "bar"},
+ }
+
+ Example credential_source for file-sourced credential::
+
+ {
+ "file": "/path/to/token/file.txt"
+ }
+ args (List): Optional positional arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
+ kwargs (Mapping): Optional keyword arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If an error is encountered during
+ access token retrieval logic.
+ ValueError: For invalid parameters.
+
+ .. note:: Typically one of the helper constructors
+ :meth:`from_file` or
+ :meth:`from_info` are used instead of calling the constructor directly.
+ """
+
+ super(Credentials, self).__init__(
+ audience=audience,
+ subject_token_type=subject_token_type,
+ token_url=token_url,
+ credential_source=credential_source,
+ *args,
+ **kwargs
+ )
+ if not isinstance(credential_source, Mapping):
+ self._credential_source_file = None
+ self._credential_source_url = None
+ else:
+ self._credential_source_file = credential_source.get("file")
+ self._credential_source_url = credential_source.get("url")
+ self._credential_source_headers = credential_source.get("headers")
+ credential_source_format = credential_source.get("format", {})
+ # Get credential_source format type. When not provided, this
+ # defaults to text.
+ self._credential_source_format_type = (
+ credential_source_format.get("type") or "text"
+ )
+ # environment_id is only supported in AWS or dedicated future external
+ # account credentials.
+ if "environment_id" in credential_source:
+ raise exceptions.MalformedError(
+ "Invalid Identity Pool credential_source field 'environment_id'"
+ )
+ if self._credential_source_format_type not in ["text", "json"]:
+ raise exceptions.MalformedError(
+ "Invalid credential_source format '{}'".format(
+ self._credential_source_format_type
+ )
+ )
+ # For JSON types, get the required subject_token field name.
+ if self._credential_source_format_type == "json":
+ self._credential_source_field_name = credential_source_format.get(
+ "subject_token_field_name"
+ )
+ if self._credential_source_field_name is None:
+ raise exceptions.MalformedError(
+ "Missing subject_token_field_name for JSON credential_source format"
+ )
+ else:
+ self._credential_source_field_name = None
+
+ if self._credential_source_file and self._credential_source_url:
+ raise exceptions.MalformedError(
+ "Ambiguous credential_source. 'file' is mutually exclusive with 'url'."
+ )
+ if not self._credential_source_file and not self._credential_source_url:
+ raise exceptions.MalformedError(
+ "Missing credential_source. A 'file' or 'url' must be provided."
+ )
+
+ @_helpers.copy_docstring(external_account.Credentials)
+ def retrieve_subject_token(self, request):
+ return self._parse_token_data(
+ self._get_token_data(request),
+ self._credential_source_format_type,
+ self._credential_source_field_name,
+ )
+
+ def _get_token_data(self, request):
+ if self._credential_source_file:
+ return self._get_file_data(self._credential_source_file)
+ else:
+ return self._get_url_data(
+ request, self._credential_source_url, self._credential_source_headers
+ )
+
+ def _get_file_data(self, filename):
+ if not os.path.exists(filename):
+ raise exceptions.RefreshError("File '{}' was not found.".format(filename))
+
+ with io.open(filename, "r", encoding="utf-8") as file_obj:
+ return file_obj.read(), filename
+
+ def _get_url_data(self, request, url, headers):
+ response = request(url=url, method="GET", headers=headers)
+
+ # support both string and bytes type response.data
+ response_body = (
+ response.data.decode("utf-8")
+ if hasattr(response.data, "decode")
+ else response.data
+ )
+
+ if response.status != 200:
+ raise exceptions.RefreshError(
+ "Unable to retrieve Identity Pool subject token", response_body
+ )
+
+ return response_body, url
+
+ def _parse_token_data(
+ self, token_content, format_type="text", subject_token_field_name=None
+ ):
+ content, filename = token_content
+ if format_type == "text":
+ token = content
+ else:
+ try:
+ # Parse file content as JSON.
+ response_data = json.loads(content)
+ # Get the subject_token.
+ token = response_data[subject_token_field_name]
+ except (KeyError, ValueError):
+ raise exceptions.RefreshError(
+ "Unable to parse subject_token from JSON file '{}' using key '{}'".format(
+ filename, subject_token_field_name
+ )
+ )
+ if not token:
+ raise exceptions.RefreshError(
+ "Missing subject_token in the credential_source file"
+ )
+ return token
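+
+ # For example (illustrative content): with format_type="json" and
+ # subject_token_field_name="access_token", token content of
+ # '{"access_token": "eyJhbGci..."}' yields "eyJhbGci..." as the subject
+ # token; with format_type="text" the raw content is returned unchanged.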
+
+ def _create_default_metrics_options(self):
+ metrics_options = super(Credentials, self)._create_default_metrics_options()
+ # Check that credential source is a dict before checking for file vs url. This check needs to be done
+ # here because the external_account credential constructor needs to pass the metrics options to the
+ # impersonated credential object before the identity_pool credentials are validated.
+ if isinstance(self._credential_source, Mapping):
+ if self._credential_source.get("file"):
+ metrics_options["source"] = "file"
+ else:
+ metrics_options["source"] = "url"
+ return metrics_options
+
+ @classmethod
+ def from_info(cls, info, **kwargs):
+ """Creates an Identity Pool Credentials instance from parsed external account info.
+
+ Args:
+ info (Mapping[str, str]): The Identity Pool external account info in Google
+ format.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.identity_pool.Credentials: The constructed
+ credentials.
+
+ Raises:
+ ValueError: For invalid parameters.
+ """
+ return super(Credentials, cls).from_info(info, **kwargs)
+
+ @classmethod
+ def from_file(cls, filename, **kwargs):
+ """Creates an IdentityPool Credentials instance from an external account json file.
+
+ Args:
+ filename (str): The path to the IdentityPool external account json file.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.identity_pool.Credentials: The constructed
+ credentials.
+ """
+ return super(Credentials, cls).from_file(filename, **kwargs)
diff --git a/Lib/site-packages/google/auth/impersonated_credentials.py b/Lib/site-packages/google/auth/impersonated_credentials.py
new file mode 100644
index 0000000..d32e6eb
--- /dev/null
+++ b/Lib/site-packages/google/auth/impersonated_credentials.py
@@ -0,0 +1,465 @@
+# Copyright 2018 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Cloud Impersonated credentials.
+
+This module provides authentication for applications where local credentials
+impersonate a remote service account using the `IAM Credentials API`_.
+
+This class can be used to impersonate a service account as long as the original
+Credential object has the "Service Account Token Creator" role on the target
+service account.
+
+ .. _IAM Credentials API:
+ https://cloud.google.com/iam/credentials/reference/rest/
+"""
+
+import base64
+import copy
+from datetime import datetime
+import http.client as http_client
+import json
+
+from google.auth import _helpers
+from google.auth import credentials
+from google.auth import exceptions
+from google.auth import jwt
+from google.auth import metrics
+
+_IAM_SCOPE = ["https://www.googleapis.com/auth/iam"]
+
+_IAM_ENDPOINT = (
+ "https://iamcredentials.googleapis.com/v1/projects/-"
+ + "/serviceAccounts/{}:generateAccessToken"
+)
+
+_IAM_SIGN_ENDPOINT = (
+ "https://iamcredentials.googleapis.com/v1/projects/-"
+ + "/serviceAccounts/{}:signBlob"
+)
+
+_IAM_IDTOKEN_ENDPOINT = (
+ "https://iamcredentials.googleapis.com/v1/"
+ + "projects/-/serviceAccounts/{}:generateIdToken"
+)
+
+_REFRESH_ERROR = "Unable to acquire impersonated credentials"
+
+_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
+
+_DEFAULT_TOKEN_URI = "https://oauth2.googleapis.com/token"
+
+
+def _make_iam_token_request(
+ request, principal, headers, body, iam_endpoint_override=None
+):
+ """Makes a request to the Google Cloud IAM service for an access token.
+ Args:
+ request (Request): The Request object to use.
+ principal (str): The principal to request an access token for.
+ headers (Mapping[str, str]): Map of headers to transmit.
+ body (Mapping[str, str]): JSON Payload body for the iamcredentials
+ API call.
+ iam_endpoint_override (Optional[str]): The full IAM endpoint override
+ with the target_principal embedded. This is useful when supporting
+ impersonation with regional endpoints.
+
+ Raises:
+ google.auth.exceptions.TransportError: Raised if there is an underlying
+ HTTP connection error
+ google.auth.exceptions.RefreshError: Raised if the impersonated
+ credentials are not available. Common reasons are
+ `iamcredentials.googleapis.com` is not enabled or the
+ `Service Account Token Creator` is not assigned
+ """
+ iam_endpoint = iam_endpoint_override or _IAM_ENDPOINT.format(principal)
+
+ body = json.dumps(body).encode("utf-8")
+
+ response = request(url=iam_endpoint, method="POST", headers=headers, body=body)
+
+ # support both string and bytes type response.data
+ response_body = (
+ response.data.decode("utf-8")
+ if hasattr(response.data, "decode")
+ else response.data
+ )
+
+ if response.status != http_client.OK:
+ raise exceptions.RefreshError(_REFRESH_ERROR, response_body)
+
+ try:
+ token_response = json.loads(response_body)
+ token = token_response["accessToken"]
+ expiry = datetime.strptime(token_response["expireTime"], "%Y-%m-%dT%H:%M:%SZ")
+
+ return token, expiry
+
+ except (KeyError, ValueError) as caught_exc:
+ new_exc = exceptions.RefreshError(
+ "{}: No access token or invalid expiration in response.".format(
+ _REFRESH_ERROR
+ ),
+ response_body,
+ )
+ raise new_exc from caught_exc
+
+
+class Credentials(
+ credentials.Scoped, credentials.CredentialsWithQuotaProject, credentials.Signing
+):
+ """This module defines impersonated credentials which are essentially
+ impersonated identities.
+
+ Impersonated Credentials allows credentials issued to a user or
+ service account to impersonate another. The target service account must
+ grant the originating credential principal the
+ `Service Account Token Creator`_ IAM role:
+
+ For more information about Token Creator IAM role and
+ IAMCredentials API, see
+ `Creating Short-Lived Service Account Credentials`_.
+
+ .. _Service Account Token Creator:
+ https://cloud.google.com/iam/docs/service-accounts#the_service_account_token_creator_role
+
+ .. _Creating Short-Lived Service Account Credentials:
+ https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials
+
+ Usage:
+
+ First grant source_credentials the `Service Account Token Creator`
+ role on the target account to impersonate. In this example, the
+ service account represented by svc_account.json has the
+ token creator role on
+ `impersonated-account@_project_.iam.gserviceaccount.com`.
+
+ Enable the IAMCredentials API on the source project:
+ `gcloud services enable iamcredentials.googleapis.com`.
+
+ Initialize a source credential which does not have access to
+ list bucket::
+
+ from google.oauth2 import service_account
+
+ target_scopes = [
+ 'https://www.googleapis.com/auth/devstorage.read_only']
+
+ source_credentials = (
+ service_account.Credentials.from_service_account_file(
+ '/path/to/svc_account.json',
+ scopes=target_scopes))
+
+ Now use the source credentials to acquire credentials to impersonate
+ another service account::
+
+ from google.auth import impersonated_credentials
+
+ target_credentials = impersonated_credentials.Credentials(
+ source_credentials=source_credentials,
+ target_principal='impersonated-account@_project_.iam.gserviceaccount.com',
+ target_scopes = target_scopes,
+ lifetime=500)
+
+ Resource access is granted::
+
+ client = storage.Client(credentials=target_credentials)
+ buckets = client.list_buckets(project='your_project')
+ for bucket in buckets:
+ print(bucket.name)
+ """
+
+ def __init__(
+ self,
+ source_credentials,
+ target_principal,
+ target_scopes,
+ delegates=None,
+ lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
+ quota_project_id=None,
+ iam_endpoint_override=None,
+ ):
+ """
+ Args:
+ source_credentials (google.auth.Credentials): The source credential
+ used to acquire the impersonated credentials.
+ target_principal (str): The service account to impersonate.
+ target_scopes (Sequence[str]): Scopes to request during the
+ authorization grant.
+ delegates (Sequence[str]): The chained list of delegates required
+ to grant the final access_token. If set, the sequence of
+ identities must have "Service Account Token Creator" capability
+ granted to the preceding identity. For example, if set to
+ [serviceAccountB, serviceAccountC], the source_credential
+ must have the Token Creator role on serviceAccountB.
+ serviceAccountB must have the Token Creator on
+ serviceAccountC.
+ Finally, C must have Token Creator on target_principal.
+ If left unset, source_credential must have that role on
+ target_principal.
+ lifetime (int): Number of seconds the delegated credential should
+ be valid for (up to 3600).
+ quota_project_id (Optional[str]): The project ID used for quota and billing.
+ This project may be different from the project used to
+ create the credentials.
+ iam_endpoint_override (Optional[str]): The full IAM endpoint override
+ with the target_principal embedded. This is useful when supporting
+ impersonation with regional endpoints.
+ """
+
+ super(Credentials, self).__init__()
+
+ self._source_credentials = copy.copy(source_credentials)
+ # Service account source credentials must have the _IAM_SCOPE
+ # added to refresh correctly. User credentials cannot have
+ # their original scopes modified.
+ if isinstance(self._source_credentials, credentials.Scoped):
+ self._source_credentials = self._source_credentials.with_scopes(_IAM_SCOPE)
+ # If the source credential is service account and self signed jwt
+ # is needed, we need to create a jwt credential inside it
+ if (
+ hasattr(self._source_credentials, "_create_self_signed_jwt")
+ and self._source_credentials._always_use_jwt_access
+ ):
+ self._source_credentials._create_self_signed_jwt(None)
+ self._target_principal = target_principal
+ self._target_scopes = target_scopes
+ self._delegates = delegates
+ self._lifetime = lifetime or _DEFAULT_TOKEN_LIFETIME_SECS
+ self.token = None
+ self.expiry = _helpers.utcnow()
+ self._quota_project_id = quota_project_id
+ self._iam_endpoint_override = iam_endpoint_override
+
+ def _metric_header_for_usage(self):
+ return metrics.CRED_TYPE_SA_IMPERSONATE
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ self._update_token(request)
+
+ def _update_token(self, request):
+ """Updates credentials with a new access_token representing
+ the impersonated account.
+
+ Args:
+ request (google.auth.transport.requests.Request): Request object
+ to use for refreshing credentials.
+ """
+
+ # Refresh our source credentials if it is not valid.
+ if (
+ self._source_credentials.token_state == credentials.TokenState.STALE
+ or self._source_credentials.token_state == credentials.TokenState.INVALID
+ ):
+ self._source_credentials.refresh(request)
+
+ body = {
+ "delegates": self._delegates,
+ "scope": self._target_scopes,
+ "lifetime": str(self._lifetime) + "s",
+ }
+
+ headers = {
+ "Content-Type": "application/json",
+ metrics.API_CLIENT_HEADER: metrics.token_request_access_token_impersonate(),
+ }
+
+ # Apply the source credentials authentication info.
+ self._source_credentials.apply(headers)
+
+ self.token, self.expiry = _make_iam_token_request(
+ request=request,
+ principal=self._target_principal,
+ headers=headers,
+ body=body,
+ iam_endpoint_override=self._iam_endpoint_override,
+ )
+
+ def sign_bytes(self, message):
+ from google.auth.transport.requests import AuthorizedSession
+
+ iam_sign_endpoint = _IAM_SIGN_ENDPOINT.format(self._target_principal)
+
+ body = {
+ "payload": base64.b64encode(message).decode("utf-8"),
+ "delegates": self._delegates,
+ }
+
+ headers = {"Content-Type": "application/json"}
+
+ authed_session = AuthorizedSession(self._source_credentials)
+
+ try:
+ response = authed_session.post(
+ url=iam_sign_endpoint, headers=headers, json=body
+ )
+ finally:
+ authed_session.close()
+
+ if response.status_code != http_client.OK:
+ raise exceptions.TransportError(
+ "Error calling sign_bytes: {}".format(response.json())
+ )
+
+ return base64.b64decode(response.json()["signedBlob"])
+
+ @property
+ def signer_email(self):
+ return self._target_principal
+
+ @property
+ def service_account_email(self):
+ return self._target_principal
+
+ @property
+ def signer(self):
+ return self
+
+ @property
+ def requires_scopes(self):
+ return not self._target_scopes
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(
+ self._source_credentials,
+ target_principal=self._target_principal,
+ target_scopes=self._target_scopes,
+ delegates=self._delegates,
+ lifetime=self._lifetime,
+ quota_project_id=quota_project_id,
+ iam_endpoint_override=self._iam_endpoint_override,
+ )
+
+ @_helpers.copy_docstring(credentials.Scoped)
+ def with_scopes(self, scopes, default_scopes=None):
+ return self.__class__(
+ self._source_credentials,
+ target_principal=self._target_principal,
+ target_scopes=scopes or default_scopes,
+ delegates=self._delegates,
+ lifetime=self._lifetime,
+ quota_project_id=self._quota_project_id,
+ iam_endpoint_override=self._iam_endpoint_override,
+ )
+
+
+class IDTokenCredentials(credentials.CredentialsWithQuotaProject):
+ """Open ID Connect ID Token-based service account credentials.
+
+ """
+
+ def __init__(
+ self,
+ target_credentials,
+ target_audience=None,
+ include_email=False,
+ quota_project_id=None,
+ ):
+ """
+ Args:
+            target_credentials (google.auth.Credentials): The target
+                credentials used to acquire ID tokens.
+            target_audience (string): Audience to issue the token for.
+            include_email (bool): Whether to include the service account
+                email in the ID token.
+ quota_project_id (Optional[str]): The project ID used for
+ quota and billing.
+ """
+ super(IDTokenCredentials, self).__init__()
+
+ if not isinstance(target_credentials, Credentials):
+ raise exceptions.GoogleAuthError(
+ "Provided Credential must be " "impersonated_credentials"
+ )
+ self._target_credentials = target_credentials
+ self._target_audience = target_audience
+ self._include_email = include_email
+ self._quota_project_id = quota_project_id
+
+ def from_credentials(self, target_credentials, target_audience=None):
+ return self.__class__(
+ target_credentials=target_credentials,
+ target_audience=target_audience,
+ include_email=self._include_email,
+ quota_project_id=self._quota_project_id,
+ )
+
+ def with_target_audience(self, target_audience):
+ return self.__class__(
+ target_credentials=self._target_credentials,
+ target_audience=target_audience,
+ include_email=self._include_email,
+ quota_project_id=self._quota_project_id,
+ )
+
+ def with_include_email(self, include_email):
+ return self.__class__(
+ target_credentials=self._target_credentials,
+ target_audience=self._target_audience,
+ include_email=include_email,
+ quota_project_id=self._quota_project_id,
+ )
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(
+ target_credentials=self._target_credentials,
+ target_audience=self._target_audience,
+ include_email=self._include_email,
+ quota_project_id=quota_project_id,
+ )
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ from google.auth.transport.requests import AuthorizedSession
+
+ iam_sign_endpoint = _IAM_IDTOKEN_ENDPOINT.format(
+ self._target_credentials.signer_email
+ )
+
+ body = {
+ "audience": self._target_audience,
+ "delegates": self._target_credentials._delegates,
+ "includeEmail": self._include_email,
+ }
+
+ headers = {
+ "Content-Type": "application/json",
+ metrics.API_CLIENT_HEADER: metrics.token_request_id_token_impersonate(),
+ }
+
+ authed_session = AuthorizedSession(
+ self._target_credentials._source_credentials, auth_request=request
+ )
+
+ try:
+ response = authed_session.post(
+ url=iam_sign_endpoint,
+ headers=headers,
+ data=json.dumps(body).encode("utf-8"),
+ )
+ finally:
+ authed_session.close()
+
+ if response.status_code != http_client.OK:
+ raise exceptions.RefreshError(
+ "Error getting ID token: {}".format(response.json())
+ )
+
+ id_token = response.json()["token"]
+ self.token = id_token
+ self.expiry = datetime.utcfromtimestamp(
+ jwt.decode(id_token, verify=False)["exp"]
+ )
diff --git a/Lib/site-packages/google/auth/jwt.py b/Lib/site-packages/google/auth/jwt.py
new file mode 100644
index 0000000..1ebd565
--- /dev/null
+++ b/Lib/site-packages/google/auth/jwt.py
@@ -0,0 +1,878 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""JSON Web Tokens
+
+Provides support for creating (encoding) and verifying (decoding) JWTs,
+especially JWTs generated and consumed by Google infrastructure.
+
+See `rfc7519`_ for more details on JWTs.
+
+To encode a JWT use :func:`encode`::
+
+ from google.auth import crypt
+ from google.auth import jwt
+
+ signer = crypt.Signer(private_key)
+ payload = {'some': 'payload'}
+ encoded = jwt.encode(signer, payload)
+
+To decode a JWT and verify claims use :func:`decode`::
+
+ claims = jwt.decode(encoded, certs=public_certs)
+
+You can also skip verification::
+
+ claims = jwt.decode(encoded, verify=False)
+
+.. _rfc7519: https://tools.ietf.org/html/rfc7519
+
+"""
+
+try:
+ from collections.abc import Mapping
+# Python 2.7 compatibility
+except ImportError: # pragma: NO COVER
+ from collections import Mapping # type: ignore
+import copy
+import datetime
+import json
+import urllib
+
+import cachetools
+
+from google.auth import _helpers
+from google.auth import _service_account_info
+from google.auth import crypt
+from google.auth import exceptions
+import google.auth.credentials
+
+try:
+ from google.auth.crypt import es256
+except ImportError: # pragma: NO COVER
+ es256 = None # type: ignore
+
+_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
+_DEFAULT_MAX_CACHE_SIZE = 10
+_ALGORITHM_TO_VERIFIER_CLASS = {"RS256": crypt.RSAVerifier}
+_CRYPTOGRAPHY_BASED_ALGORITHMS = frozenset(["ES256"])
+
+if es256 is not None: # pragma: NO COVER
+ _ALGORITHM_TO_VERIFIER_CLASS["ES256"] = es256.ES256Verifier # type: ignore
+
+
+def encode(signer, payload, header=None, key_id=None):
+ """Make a signed JWT.
+
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign the JWT.
+ payload (Mapping[str, str]): The JWT payload.
+ header (Mapping[str, str]): Additional JWT header payload.
+ key_id (str): The key id to add to the JWT header. If the
+ signer has a key id it will be used as the default. If this is
+ specified it will override the signer's key id.
+
+ Returns:
+ bytes: The encoded JWT.
+ """
+ if header is None:
+ header = {}
+
+ if key_id is None:
+ key_id = signer.key_id
+
+ header.update({"typ": "JWT"})
+
+ if "alg" not in header:
+ if es256 is not None and isinstance(signer, es256.ES256Signer):
+ header.update({"alg": "ES256"})
+ else:
+ header.update({"alg": "RS256"})
+
+ if key_id is not None:
+ header["kid"] = key_id
+
+ segments = [
+ _helpers.unpadded_urlsafe_b64encode(json.dumps(header).encode("utf-8")),
+ _helpers.unpadded_urlsafe_b64encode(json.dumps(payload).encode("utf-8")),
+ ]
+
+ signing_input = b".".join(segments)
+ signature = signer.sign(signing_input)
+ segments.append(_helpers.unpadded_urlsafe_b64encode(signature))
+
+ return b".".join(segments)
+
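+# Illustrative sketch (hypothetical names): `encode` derives the "alg" header
+# from the signer type, and an explicit key_id overrides the signer's own.
+# Assuming `signer` is a google.auth.crypt.RSASigner:
+#
+#     token = encode(signer, {"iss": "me"}, key_id="my-key-1")
+#     decode_header(token)
+#     # -> {"typ": "JWT", "alg": "RS256", "kid": "my-key-1"}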
+
+def _decode_jwt_segment(encoded_section):
+ """Decodes a single JWT segment."""
+ section_bytes = _helpers.padded_urlsafe_b64decode(encoded_section)
+ try:
+ return json.loads(section_bytes.decode("utf-8"))
+ except ValueError as caught_exc:
+ new_exc = exceptions.MalformedError(
+ "Can't parse segment: {0}".format(section_bytes)
+ )
+ raise new_exc from caught_exc
+
+
+def _unverified_decode(token):
+ """Decodes a token and does no verification.
+
+ Args:
+ token (Union[str, bytes]): The encoded JWT.
+
+ Returns:
+ Tuple[Mapping, Mapping, str, str]: header, payload, signed_section, and
+ signature.
+
+ Raises:
+        google.auth.exceptions.MalformedError: if there is an incorrect number of segments in the token, or a segment is of the wrong type.
+ """
+ token = _helpers.to_bytes(token)
+
+ if token.count(b".") != 2:
+ raise exceptions.MalformedError(
+ "Wrong number of segments in token: {0}".format(token)
+ )
+
+ encoded_header, encoded_payload, signature = token.split(b".")
+ signed_section = encoded_header + b"." + encoded_payload
+ signature = _helpers.padded_urlsafe_b64decode(signature)
+
+ # Parse segments
+ header = _decode_jwt_segment(encoded_header)
+ payload = _decode_jwt_segment(encoded_payload)
+
+ if not isinstance(header, Mapping):
+ raise exceptions.MalformedError(
+ "Header segment should be a JSON object: {0}".format(encoded_header)
+ )
+
+ if not isinstance(payload, Mapping):
+ raise exceptions.MalformedError(
+ "Payload segment should be a JSON object: {0}".format(encoded_payload)
+ )
+
+ return header, payload, signed_section, signature
+
+
+def decode_header(token):
+ """Return the decoded header of a token.
+
+ No verification is done. This is useful to extract the key id from
+ the header in order to acquire the appropriate certificate to verify
+ the token.
+
+ Args:
+ token (Union[str, bytes]): the encoded JWT.
+
+ Returns:
+ Mapping: The decoded JWT header.
+ """
+ header, _, _, _ = _unverified_decode(token)
+ return header
+
+
+def _verify_iat_and_exp(payload, clock_skew_in_seconds=0):
+ """Verifies the ``iat`` (Issued At) and ``exp`` (Expires) claims in a token
+ payload.
+
+ Args:
+ payload (Mapping[str, str]): The JWT payload.
+ clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
+ validation.
+
+ Raises:
+ google.auth.exceptions.InvalidValue: if value validation failed.
+ google.auth.exceptions.MalformedError: if schema validation failed.
+ """
+ now = _helpers.datetime_to_secs(_helpers.utcnow())
+
+ # Make sure the iat and exp claims are present.
+ for key in ("iat", "exp"):
+ if key not in payload:
+ raise exceptions.MalformedError(
+ "Token does not contain required claim {}".format(key)
+ )
+
+ # Make sure the token wasn't issued in the future.
+ iat = payload["iat"]
+ # Err on the side of accepting a token that is slightly early to account
+ # for clock skew.
+ earliest = iat - clock_skew_in_seconds
+ if now < earliest:
+ raise exceptions.InvalidValue(
+ "Token used too early, {} < {}. Check that your computer's clock is set correctly.".format(
+ now, iat
+ )
+ )
+
+    # Make sure the token hasn't expired.
+ exp = payload["exp"]
+ # Err on the side of accepting a token that is slightly out of date
+    # to account for clock skew.
+ latest = exp + clock_skew_in_seconds
+ if latest < now:
+ raise exceptions.InvalidValue("Token expired, {} < {}".format(latest, now))
+
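+# Worked example: with iat=100, exp=200 and clock_skew_in_seconds=10, a token
+# is accepted for any `now` in the closed window [90, 210]; now < 90 raises
+# "Token used too early" and now > 210 raises "Token expired".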
+
+def decode(token, certs=None, verify=True, audience=None, clock_skew_in_seconds=0):
+ """Decode and verify a JWT.
+
+ Args:
+ token (str): The encoded JWT.
+ certs (Union[str, bytes, Mapping[str, Union[str, bytes]]]): The
+ certificate used to validate the JWT signature. If bytes or string,
+            it must be the public key certificate in PEM format. If a mapping,
+ it must be a mapping of key IDs to public key certificates in PEM
+ format. The mapping must contain the same key ID that's specified
+ in the token's header.
+ verify (bool): Whether to perform signature and claim validation.
+ Verification is done by default.
+ audience (str or list): The audience claim, 'aud', that this JWT should
+ contain. Or a list of audience claims. If None then the JWT's 'aud'
+ parameter is not verified.
+ clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
+ validation.
+
+ Returns:
+ Mapping[str, str]: The deserialized JSON payload in the JWT.
+
+ Raises:
+ google.auth.exceptions.InvalidValue: if value validation failed.
+ google.auth.exceptions.MalformedError: if schema validation failed.
+ """
+ header, payload, signed_section, signature = _unverified_decode(token)
+
+ if not verify:
+ return payload
+
+ # Pluck the key id and algorithm from the header and make sure we have
+ # a verifier that can support it.
+ key_alg = header.get("alg")
+ key_id = header.get("kid")
+
+ try:
+ verifier_cls = _ALGORITHM_TO_VERIFIER_CLASS[key_alg]
+ except KeyError as exc:
+ if key_alg in _CRYPTOGRAPHY_BASED_ALGORITHMS:
+ raise exceptions.InvalidValue(
+ "The key algorithm {} requires the cryptography package to be installed.".format(
+ key_alg
+ )
+ ) from exc
+ else:
+ raise exceptions.InvalidValue(
+ "Unsupported signature algorithm {}".format(key_alg)
+ ) from exc
+ # If certs is specified as a dictionary of key IDs to certificates, then
+ # use the certificate identified by the key ID in the token header.
+ if isinstance(certs, Mapping):
+ if key_id:
+ if key_id not in certs:
+ raise exceptions.MalformedError(
+ "Certificate for key id {} not found.".format(key_id)
+ )
+ certs_to_check = [certs[key_id]]
+ # If there's no key id in the header, check against all of the certs.
+ else:
+ certs_to_check = certs.values()
+ else:
+ certs_to_check = certs
+
+ # Verify that the signature matches the message.
+ if not crypt.verify_signature(
+ signed_section, signature, certs_to_check, verifier_cls
+ ):
+ raise exceptions.MalformedError("Could not verify token signature.")
+
+ # Verify the issued at and created times in the payload.
+ _verify_iat_and_exp(payload, clock_skew_in_seconds)
+
+ # Check audience.
+ if audience is not None:
+ claim_audience = payload.get("aud")
+ if isinstance(audience, str):
+ audience = [audience]
+ if claim_audience not in audience:
+ raise exceptions.InvalidValue(
+ "Token has wrong audience {}, expected one of {}".format(
+ claim_audience, audience
+ )
+ )
+
+ return payload
+
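+# Illustrative sketch (hypothetical certificates): when `certs` is a mapping,
+# the token's "kid" header selects which certificate to check, e.g.
+#
+#     certs = {"key-1": pem_cert_1, "key-2": pem_cert_2}
+#     claims = decode(token, certs=certs, audience="my-audience")
+#
+# A token whose header carries "kid": "key-2" is verified only against
+# pem_cert_2; with no "kid" in the header, every certificate is tried.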
+
+class Credentials(
+ google.auth.credentials.Signing, google.auth.credentials.CredentialsWithQuotaProject
+):
+ """Credentials that use a JWT as the bearer token.
+
+ These credentials require an "audience" claim. This claim identifies the
+ intended recipient of the bearer token.
+
+ The constructor arguments determine the claims for the JWT that is
+ sent with requests. Usually, you'll construct these credentials with
+ one of the helper constructors as shown in the next section.
+
+ To create JWT credentials using a Google service account private key
+ JSON file::
+
+ audience = 'https://pubsub.googleapis.com/google.pubsub.v1.Publisher'
+ credentials = jwt.Credentials.from_service_account_file(
+ 'service-account.json',
+ audience=audience)
+
+ If you already have the service account file loaded and parsed::
+
+ service_account_info = json.load(open('service_account.json'))
+ credentials = jwt.Credentials.from_service_account_info(
+ service_account_info,
+ audience=audience)
+
+ Both helper methods pass on arguments to the constructor, so you can
+ specify the JWT claims::
+
+ credentials = jwt.Credentials.from_service_account_file(
+ 'service-account.json',
+ audience=audience,
+ additional_claims={'meta': 'data'})
+
+ You can also construct the credentials directly if you have a
+ :class:`~google.auth.crypt.Signer` instance::
+
+ credentials = jwt.Credentials(
+ signer,
+ issuer='your-issuer',
+ subject='your-subject',
+ audience=audience)
+
+ The claims are considered immutable. If you want to modify the claims,
+ you can easily create another instance using :meth:`with_claims`::
+
+ new_audience = (
+ 'https://pubsub.googleapis.com/google.pubsub.v1.Subscriber')
+ new_credentials = credentials.with_claims(audience=new_audience)
+ """
+
+ def __init__(
+ self,
+ signer,
+ issuer,
+ subject,
+ audience,
+ additional_claims=None,
+ token_lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
+ quota_project_id=None,
+ ):
+ """
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ issuer (str): The `iss` claim.
+ subject (str): The `sub` claim.
+ audience (str): the `aud` claim. The intended audience for the
+ credentials.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT payload.
+ token_lifetime (int): The amount of time in seconds for
+ which the token is valid. Defaults to 1 hour.
+ quota_project_id (Optional[str]): The project ID used for quota
+ and billing.
+ """
+ super(Credentials, self).__init__()
+ self._signer = signer
+ self._issuer = issuer
+ self._subject = subject
+ self._audience = audience
+ self._token_lifetime = token_lifetime
+ self._quota_project_id = quota_project_id
+
+ if additional_claims is None:
+ additional_claims = {}
+
+ self._additional_claims = additional_claims
+
+ @classmethod
+ def _from_signer_and_info(cls, signer, info, **kwargs):
+ """Creates a Credentials instance from a signer and service account
+ info.
+
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ info (Mapping[str, str]): The service account info.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.Credentials: The constructed credentials.
+
+ Raises:
+ google.auth.exceptions.MalformedError: If the info is not in the expected format.
+ """
+ kwargs.setdefault("subject", info["client_email"])
+ kwargs.setdefault("issuer", info["client_email"])
+ return cls(signer, **kwargs)
+
+ @classmethod
+ def from_service_account_info(cls, info, **kwargs):
+ """Creates an Credentials instance from a dictionary.
+
+ Args:
+ info (Mapping[str, str]): The service account info in Google
+ format.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.Credentials: The constructed credentials.
+
+ Raises:
+ google.auth.exceptions.MalformedError: If the info is not in the expected format.
+ """
+ signer = _service_account_info.from_dict(info, require=["client_email"])
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename, **kwargs):
+ """Creates a Credentials instance from a service account .json file
+ in Google format.
+
+ Args:
+ filename (str): The path to the service account .json file.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.Credentials: The constructed credentials.
+ """
+ info, signer = _service_account_info.from_filename(
+ filename, require=["client_email"]
+ )
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @classmethod
+ def from_signing_credentials(cls, credentials, audience, **kwargs):
+ """Creates a new :class:`google.auth.jwt.Credentials` instance from an
+ existing :class:`google.auth.credentials.Signing` instance.
+
+ The new instance will use the same signer as the existing instance and
+ will use the existing instance's signer email as the issuer and
+ subject by default.
+
+ Example::
+
+ svc_creds = service_account.Credentials.from_service_account_file(
+ 'service_account.json')
+ audience = (
+ 'https://pubsub.googleapis.com/google.pubsub.v1.Publisher')
+ jwt_creds = jwt.Credentials.from_signing_credentials(
+ svc_creds, audience=audience)
+
+ Args:
+ credentials (google.auth.credentials.Signing): The credentials to
+ use to construct the new credentials.
+ audience (str): the `aud` claim. The intended audience for the
+ credentials.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.Credentials: A new Credentials instance.
+ """
+ kwargs.setdefault("issuer", credentials.signer_email)
+ kwargs.setdefault("subject", credentials.signer_email)
+ return cls(credentials.signer, audience=audience, **kwargs)
+
+ def with_claims(
+ self, issuer=None, subject=None, audience=None, additional_claims=None
+ ):
+ """Returns a copy of these credentials with modified claims.
+
+ Args:
+ issuer (str): The `iss` claim. If unspecified the current issuer
+ claim will be used.
+ subject (str): The `sub` claim. If unspecified the current subject
+ claim will be used.
+ audience (str): the `aud` claim. If unspecified the current
+ audience claim will be used.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT payload. This will be merged with the current
+ additional claims.
+
+ Returns:
+ google.auth.jwt.Credentials: A new credentials instance.
+ """
+ new_additional_claims = copy.deepcopy(self._additional_claims)
+ new_additional_claims.update(additional_claims or {})
+
+ return self.__class__(
+ self._signer,
+ issuer=issuer if issuer is not None else self._issuer,
+ subject=subject if subject is not None else self._subject,
+ audience=audience if audience is not None else self._audience,
+ additional_claims=new_additional_claims,
+ quota_project_id=self._quota_project_id,
+ )
+
+ @_helpers.copy_docstring(google.auth.credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(
+ self._signer,
+ issuer=self._issuer,
+ subject=self._subject,
+ audience=self._audience,
+ additional_claims=self._additional_claims,
+ quota_project_id=quota_project_id,
+ )
+
+ def _make_jwt(self):
+ """Make a signed JWT.
+
+ Returns:
+ Tuple[bytes, datetime]: The encoded JWT and the expiration.
+ """
+ now = _helpers.utcnow()
+ lifetime = datetime.timedelta(seconds=self._token_lifetime)
+ expiry = now + lifetime
+
+ payload = {
+ "iss": self._issuer,
+ "sub": self._subject,
+ "iat": _helpers.datetime_to_secs(now),
+ "exp": _helpers.datetime_to_secs(expiry),
+ }
+ if self._audience:
+ payload["aud"] = self._audience
+
+ payload.update(self._additional_claims)
+
+ jwt = encode(self._signer, payload)
+
+ return jwt, expiry
+
+ def refresh(self, request):
+ """Refreshes the access token.
+
+ Args:
+ request (Any): Unused.
+ """
+ # pylint: disable=unused-argument
+ # (pylint doesn't correctly recognize overridden methods.)
+ self.token, self.expiry = self._make_jwt()
+
+ @_helpers.copy_docstring(google.auth.credentials.Signing)
+ def sign_bytes(self, message):
+ return self._signer.sign(message)
+
+ @property # type: ignore
+ @_helpers.copy_docstring(google.auth.credentials.Signing)
+ def signer_email(self):
+ return self._issuer
+
+ @property # type: ignore
+ @_helpers.copy_docstring(google.auth.credentials.Signing)
+ def signer(self):
+ return self._signer
+
+ @property # type: ignore
+ def additional_claims(self):
+ """ Additional claims the JWT object was created with."""
+ return self._additional_claims
+
+
+class OnDemandCredentials(
+ google.auth.credentials.Signing, google.auth.credentials.CredentialsWithQuotaProject
+):
+ """On-demand JWT credentials.
+
+ Like :class:`Credentials`, this class uses a JWT as the bearer token for
+ authentication. However, this class does not require the audience at
+ construction time. Instead, it will generate a new token on-demand for
+ each request using the request URI as the audience. It caches tokens
+ so that multiple requests to the same URI do not incur the overhead
+ of generating a new token every time.
+
+    This behavior is especially useful for `gRPC`_ clients. A gRPC service may
+    have multiple audiences, and gRPC clients may not know all of the audiences
+ required for accessing a particular service. With these credentials,
+ no knowledge of the audiences is required ahead of time.
+
+ .. _grpc: http://www.grpc.io/
+ """
+
+ def __init__(
+ self,
+ signer,
+ issuer,
+ subject,
+ additional_claims=None,
+ token_lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
+ max_cache_size=_DEFAULT_MAX_CACHE_SIZE,
+ quota_project_id=None,
+ ):
+ """
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ issuer (str): The `iss` claim.
+ subject (str): The `sub` claim.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT payload.
+ token_lifetime (int): The amount of time in seconds for
+ which the token is valid. Defaults to 1 hour.
+ max_cache_size (int): The maximum number of JWT tokens to keep in
+ cache. Tokens are cached using :class:`cachetools.LRUCache`.
+ quota_project_id (Optional[str]): The project ID used for quota
+ and billing.
+
+ """
+ super(OnDemandCredentials, self).__init__()
+ self._signer = signer
+ self._issuer = issuer
+ self._subject = subject
+ self._token_lifetime = token_lifetime
+ self._quota_project_id = quota_project_id
+
+ if additional_claims is None:
+ additional_claims = {}
+
+ self._additional_claims = additional_claims
+ self._cache = cachetools.LRUCache(maxsize=max_cache_size)
+
+ @classmethod
+ def _from_signer_and_info(cls, signer, info, **kwargs):
+ """Creates an OnDemandCredentials instance from a signer and service
+ account info.
+
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ info (Mapping[str, str]): The service account info.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.OnDemandCredentials: The constructed credentials.
+
+ Raises:
+ google.auth.exceptions.MalformedError: If the info is not in the expected format.
+ """
+ kwargs.setdefault("subject", info["client_email"])
+ kwargs.setdefault("issuer", info["client_email"])
+ return cls(signer, **kwargs)
+
+ @classmethod
+ def from_service_account_info(cls, info, **kwargs):
+ """Creates an OnDemandCredentials instance from a dictionary.
+
+ Args:
+ info (Mapping[str, str]): The service account info in Google
+ format.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.OnDemandCredentials: The constructed credentials.
+
+ Raises:
+ google.auth.exceptions.MalformedError: If the info is not in the expected format.
+ """
+ signer = _service_account_info.from_dict(info, require=["client_email"])
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename, **kwargs):
+ """Creates an OnDemandCredentials instance from a service account .json
+ file in Google format.
+
+ Args:
+ filename (str): The path to the service account .json file.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.OnDemandCredentials: The constructed credentials.
+ """
+ info, signer = _service_account_info.from_filename(
+ filename, require=["client_email"]
+ )
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @classmethod
+ def from_signing_credentials(cls, credentials, **kwargs):
+ """Creates a new :class:`google.auth.jwt.OnDemandCredentials` instance
+ from an existing :class:`google.auth.credentials.Signing` instance.
+
+ The new instance will use the same signer as the existing instance and
+ will use the existing instance's signer email as the issuer and
+ subject by default.
+
+ Example::
+
+ svc_creds = service_account.Credentials.from_service_account_file(
+ 'service_account.json')
+ jwt_creds = jwt.OnDemandCredentials.from_signing_credentials(
+ svc_creds)
+
+ Args:
+ credentials (google.auth.credentials.Signing): The credentials to
+ use to construct the new credentials.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.Credentials: A new Credentials instance.
+ """
+ kwargs.setdefault("issuer", credentials.signer_email)
+ kwargs.setdefault("subject", credentials.signer_email)
+ return cls(credentials.signer, **kwargs)
+
+ def with_claims(self, issuer=None, subject=None, additional_claims=None):
+ """Returns a copy of these credentials with modified claims.
+
+ Args:
+ issuer (str): The `iss` claim. If unspecified the current issuer
+ claim will be used.
+ subject (str): The `sub` claim. If unspecified the current subject
+ claim will be used.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT payload. This will be merged with the current
+ additional claims.
+
+ Returns:
+ google.auth.jwt.OnDemandCredentials: A new credentials instance.
+ """
+ new_additional_claims = copy.deepcopy(self._additional_claims)
+ new_additional_claims.update(additional_claims or {})
+
+ return self.__class__(
+ self._signer,
+ issuer=issuer if issuer is not None else self._issuer,
+ subject=subject if subject is not None else self._subject,
+ additional_claims=new_additional_claims,
+ max_cache_size=self._cache.maxsize,
+ quota_project_id=self._quota_project_id,
+ )
+
+ @_helpers.copy_docstring(google.auth.credentials.CredentialsWithQuotaProject)
+    def with_quota_project(self, quota_project_id):
+        return self.__class__(
+ self._signer,
+ issuer=self._issuer,
+ subject=self._subject,
+ additional_claims=self._additional_claims,
+ max_cache_size=self._cache.maxsize,
+ quota_project_id=quota_project_id,
+ )
+
+ @property
+ def valid(self):
+ """Checks the validity of the credentials.
+
+        These credentials are always valid because they generate tokens on
+        demand.
+ """
+ return True
+
+ def _make_jwt_for_audience(self, audience):
+ """Make a new JWT for the given audience.
+
+ Args:
+ audience (str): The intended audience.
+
+ Returns:
+ Tuple[bytes, datetime]: The encoded JWT and the expiration.
+ """
+ now = _helpers.utcnow()
+ lifetime = datetime.timedelta(seconds=self._token_lifetime)
+ expiry = now + lifetime
+
+ payload = {
+ "iss": self._issuer,
+ "sub": self._subject,
+ "iat": _helpers.datetime_to_secs(now),
+ "exp": _helpers.datetime_to_secs(expiry),
+ "aud": audience,
+ }
+
+ payload.update(self._additional_claims)
+
+ jwt = encode(self._signer, payload)
+
+ return jwt, expiry
+
+ def _get_jwt_for_audience(self, audience):
+ """Get a JWT For a given audience.
+
+ If there is already an existing, non-expired token in the cache for
+ the audience, that token is used. Otherwise, a new token will be
+ created.
+
+ Args:
+ audience (str): The intended audience.
+
+ Returns:
+ bytes: The encoded JWT.
+ """
+ token, expiry = self._cache.get(audience, (None, None))
+
+ if token is None or expiry < _helpers.utcnow():
+ token, expiry = self._make_jwt_for_audience(audience)
+ self._cache[audience] = token, expiry
+
+ return token
+
+ def refresh(self, request):
+ """Raises an exception, these credentials can not be directly
+ refreshed.
+
+ Args:
+ request (Any): Unused.
+
+ Raises:
+            google.auth.exceptions.RefreshError
+ """
+ # pylint: disable=unused-argument
+ # (pylint doesn't correctly recognize overridden methods.)
+ raise exceptions.RefreshError(
+ "OnDemandCredentials can not be directly refreshed."
+ )
+
+ def before_request(self, request, method, url, headers):
+ """Performs credential-specific before request logic.
+
+ Args:
+ request (Any): Unused. JWT credentials do not need to make an
+ HTTP request to refresh.
+ method (str): The request's HTTP method.
+ url (str): The request's URI. This is used as the audience claim
+ when generating the JWT.
+ headers (Mapping): The request's headers.
+ """
+ # pylint: disable=unused-argument
+ # (pylint doesn't correctly recognize overridden methods.)
+ parts = urllib.parse.urlsplit(url)
+ # Strip query string and fragment
+ audience = urllib.parse.urlunsplit(
+ (parts.scheme, parts.netloc, parts.path, "", "")
+ )
+ token = self._get_jwt_for_audience(audience)
+ self.apply(headers, token=token)
+
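+    # Illustrative note: the audience passed to _get_jwt_for_audience above is
+    # the request URL with its query string and fragment stripped, e.g.
+    #     https://pubsub.googleapis.com/v1/topics?alt=json
+    # yields the audience
+    #     https://pubsub.googleapis.com/v1/topics
+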
+ @_helpers.copy_docstring(google.auth.credentials.Signing)
+ def sign_bytes(self, message):
+ return self._signer.sign(message)
+
+ @property # type: ignore
+ @_helpers.copy_docstring(google.auth.credentials.Signing)
+ def signer_email(self):
+ return self._issuer
+
+ @property # type: ignore
+ @_helpers.copy_docstring(google.auth.credentials.Signing)
+ def signer(self):
+ return self._signer
diff --git a/Lib/site-packages/google/auth/metrics.py b/Lib/site-packages/google/auth/metrics.py
new file mode 100644
index 0000000..11e4b07
--- /dev/null
+++ b/Lib/site-packages/google/auth/metrics.py
@@ -0,0 +1,154 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" We use x-goog-api-client header to report metrics. This module provides
+the constants and helper methods to construct x-goog-api-client header.
+"""
+
+import platform
+
+from google.auth import version
+
+
+API_CLIENT_HEADER = "x-goog-api-client"
+
+# BYOID Specific consts
+BYOID_HEADER_SECTION = "google-byoid-sdk"
+
+# Auth request type
+REQUEST_TYPE_ACCESS_TOKEN = "auth-request-type/at"
+REQUEST_TYPE_ID_TOKEN = "auth-request-type/it"
+REQUEST_TYPE_MDS_PING = "auth-request-type/mds"
+REQUEST_TYPE_REAUTH_START = "auth-request-type/re-start"
+REQUEST_TYPE_REAUTH_CONTINUE = "auth-request-type/re-cont"
+
+# Credential type
+CRED_TYPE_USER = "cred-type/u"
+CRED_TYPE_SA_ASSERTION = "cred-type/sa"
+CRED_TYPE_SA_JWT = "cred-type/jwt"
+CRED_TYPE_SA_MDS = "cred-type/mds"
+CRED_TYPE_SA_IMPERSONATE = "cred-type/imp"
+
+
+# Versions
+def python_and_auth_lib_version():
+ return "gl-python/{} auth/{}".format(platform.python_version(), version.__version__)
+
+
+# Token request metric header values
+
+# x-goog-api-client header value for access token request via metadata server.
+# Example: "gl-python/3.7 auth/1.1 auth-request-type/at cred-type/mds"
+def token_request_access_token_mds():
+ return "{} {} {}".format(
+ python_and_auth_lib_version(), REQUEST_TYPE_ACCESS_TOKEN, CRED_TYPE_SA_MDS
+ )
+
+
+# x-goog-api-client header value for ID token request via metadata server.
+# Example: "gl-python/3.7 auth/1.1 auth-request-type/it cred-type/mds"
+def token_request_id_token_mds():
+ return "{} {} {}".format(
+ python_and_auth_lib_version(), REQUEST_TYPE_ID_TOKEN, CRED_TYPE_SA_MDS
+ )
+
+
+# x-goog-api-client header value for impersonated credentials access token request.
+# Example: "gl-python/3.7 auth/1.1 auth-request-type/at cred-type/imp"
+def token_request_access_token_impersonate():
+ return "{} {} {}".format(
+ python_and_auth_lib_version(),
+ REQUEST_TYPE_ACCESS_TOKEN,
+ CRED_TYPE_SA_IMPERSONATE,
+ )
+
+
+# x-goog-api-client header value for impersonated credentials ID token request.
+# Example: "gl-python/3.7 auth/1.1 auth-request-type/it cred-type/imp"
+def token_request_id_token_impersonate():
+ return "{} {} {}".format(
+ python_and_auth_lib_version(), REQUEST_TYPE_ID_TOKEN, CRED_TYPE_SA_IMPERSONATE
+ )
+
+
+# x-goog-api-client header value for service account credentials access token
+# request (assertion flow).
+# Example: "gl-python/3.7 auth/1.1 auth-request-type/at cred-type/sa"
+def token_request_access_token_sa_assertion():
+ return "{} {} {}".format(
+ python_and_auth_lib_version(), REQUEST_TYPE_ACCESS_TOKEN, CRED_TYPE_SA_ASSERTION
+ )
+
+
+# x-goog-api-client header value for service account credentials ID token
+# request (assertion flow).
+# Example: "gl-python/3.7 auth/1.1 auth-request-type/it cred-type/sa"
+def token_request_id_token_sa_assertion():
+ return "{} {} {}".format(
+ python_and_auth_lib_version(), REQUEST_TYPE_ID_TOKEN, CRED_TYPE_SA_ASSERTION
+ )
+
+
+# x-goog-api-client header value for user credentials token request.
+# Example: "gl-python/3.7 auth/1.1 cred-type/u"
+def token_request_user():
+ return "{} {}".format(python_and_auth_lib_version(), CRED_TYPE_USER)
+
+
+# Miscellaneous metrics
+
+# x-goog-api-client header value for metadata server ping.
+# Example: "gl-python/3.7 auth/1.1 auth-request-type/mds"
+def mds_ping():
+ return "{} {}".format(python_and_auth_lib_version(), REQUEST_TYPE_MDS_PING)
+
+
+# x-goog-api-client header value for reauth start endpoint calls.
+# Example: "gl-python/3.7 auth/1.1 auth-request-type/re-start"
+def reauth_start():
+ return "{} {}".format(python_and_auth_lib_version(), REQUEST_TYPE_REAUTH_START)
+
+
+# x-goog-api-client header value for reauth continue endpoint calls.
+# Example: "gl-python/3.7 auth/1.1 cred-type/re-cont"
+def reauth_continue():
+ return "{} {}".format(python_and_auth_lib_version(), REQUEST_TYPE_REAUTH_CONTINUE)
+
+
+# x-goog-api-client header value for BYOID calls to the Security Token Service exchange token endpoint.
+# Example: "gl-python/3.7 auth/1.1 google-byoid-sdk source/aws sa-impersonation/true sa-impersonation/true"
+def byoid_metrics_header(metrics_options):
+ header = "{} {}".format(python_and_auth_lib_version(), BYOID_HEADER_SECTION)
+ for key, value in metrics_options.items():
+ header = "{} {}/{}".format(header, key, value)
+ return header
+
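+# Illustrative sketch: each key/value pair in metrics_options becomes a
+# "key/value" token appended to the header, e.g.
+#
+#     byoid_metrics_header({"source": "aws", "sa-impersonation": "true"})
+#     # -> "gl-python/<py> auth/<ver> google-byoid-sdk source/aws sa-impersonation/true"
+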
+
+def add_metric_header(headers, metric_header_value):
+ """Add x-goog-api-client header with the given value.
+
+ Args:
+ headers (Mapping[str, str]): The headers to which we will add the
+ metric header.
+ metric_header_value (Optional[str]): If value is None, do nothing;
+ if headers already has a x-goog-api-client header, append the value
+ to the existing header; otherwise add a new x-goog-api-client
+ header with the given value.
+ """
+ if not metric_header_value:
+ return
+ if API_CLIENT_HEADER not in headers:
+ headers[API_CLIENT_HEADER] = metric_header_value
+ else:
+ headers[API_CLIENT_HEADER] += " " + metric_header_value
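+
+
+# Illustrative sketch: values accumulate in a single space-separated header
+# value, e.g.
+#
+#     headers = {}
+#     add_metric_header(headers, "gl-python/3.7 auth/1.1")
+#     add_metric_header(headers, "auth-request-type/at")
+#     # headers == {"x-goog-api-client": "gl-python/3.7 auth/1.1 auth-request-type/at"}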
diff --git a/Lib/site-packages/google/auth/pluggable.py b/Lib/site-packages/google/auth/pluggable.py
new file mode 100644
index 0000000..53b4eac
--- /dev/null
+++ b/Lib/site-packages/google/auth/pluggable.py
@@ -0,0 +1,429 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Pluggable Credentials.
+Pluggable Credentials are initialized using external_account arguments which
+are typically loaded from third-party executables. Unlike other
+credentials that can be initialized with a list of explicit arguments, secrets
+or credentials, external account clients use the environment and hints/guidelines
+provided by the external_account JSON file to retrieve credentials and exchange
+them for Google access tokens.
+
+Example credential_source for pluggable credential:
+{
+ "executable": {
+ "command": "/path/to/get/credentials.sh --arg1=value1 --arg2=value2",
+ "timeout_millis": 5000,
+ "output_file": "/path/to/generated/cached/credentials"
+ }
+}
+"""
+
+try:
+ from collections.abc import Mapping
+# Python 2.7 compatibility
+except ImportError: # pragma: NO COVER
+ from collections import Mapping
+import json
+import os
+import subprocess
+import sys
+import time
+
+from google.auth import _helpers
+from google.auth import exceptions
+from google.auth import external_account
+
+# The max supported executable spec version.
+EXECUTABLE_SUPPORTED_MAX_VERSION = 1
+
+EXECUTABLE_TIMEOUT_MILLIS_DEFAULT = 30 * 1000 # 30 seconds
+EXECUTABLE_TIMEOUT_MILLIS_LOWER_BOUND = 5 * 1000 # 5 seconds
+EXECUTABLE_TIMEOUT_MILLIS_UPPER_BOUND = 120 * 1000 # 2 minutes
+
+EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_LOWER_BOUND = 30 * 1000 # 30 seconds
+EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_UPPER_BOUND = 30 * 60 * 1000 # 30 minutes
+
+
+class Credentials(external_account.Credentials):
+ """External account credentials sourced from executables."""
+
+ def __init__(
+ self,
+ audience,
+ subject_token_type,
+ token_url,
+ credential_source,
+ *args,
+ **kwargs
+ ):
+ """Instantiates an external account credentials object from a executables.
+
+ Args:
+ audience (str): The STS audience field.
+ subject_token_type (str): The subject token type.
+ token_url (str): The STS endpoint URL.
+ credential_source (Mapping): The credential source dictionary used to
+ provide instructions on how to retrieve external credential to be
+ exchanged for Google access tokens.
+
+ Example credential_source for pluggable credential:
+
+ {
+ "executable": {
+ "command": "/path/to/get/credentials.sh --arg1=value1 --arg2=value2",
+ "timeout_millis": 5000,
+ "output_file": "/path/to/generated/cached/credentials"
+ }
+ }
+ args (List): Optional positional arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
+ kwargs (Mapping): Optional keyword arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If an error is encountered during
+ access token retrieval logic.
+ google.auth.exceptions.InvalidValue: For invalid parameters.
+ google.auth.exceptions.MalformedError: For invalid parameters.
+
+ .. note:: Typically one of the helper constructors
+ :meth:`from_file` or
+ :meth:`from_info` are used instead of calling the constructor directly.
+ """
+
+ self.interactive = kwargs.pop("interactive", False)
+ super(Credentials, self).__init__(
+ audience=audience,
+ subject_token_type=subject_token_type,
+ token_url=token_url,
+ credential_source=credential_source,
+ *args,
+ **kwargs
+ )
+ if not isinstance(credential_source, Mapping):
+ self._credential_source_executable = None
+ raise exceptions.MalformedError(
+ "Missing credential_source. The credential_source is not a dict."
+ )
+ self._credential_source_executable = credential_source.get("executable")
+ if not self._credential_source_executable:
+ raise exceptions.MalformedError(
+ "Missing credential_source. An 'executable' must be provided."
+ )
+ self._credential_source_executable_command = self._credential_source_executable.get(
+ "command"
+ )
+ self._credential_source_executable_timeout_millis = self._credential_source_executable.get(
+ "timeout_millis"
+ )
+ self._credential_source_executable_interactive_timeout_millis = self._credential_source_executable.get(
+ "interactive_timeout_millis"
+ )
+ self._credential_source_executable_output_file = self._credential_source_executable.get(
+ "output_file"
+ )
+
+ # Dummy value. This variable is only used via injection, not exposed to ctor
+ self._tokeninfo_username = ""
+
+ if not self._credential_source_executable_command:
+ raise exceptions.MalformedError(
+ "Missing command field. Executable command must be provided."
+ )
+ if not self._credential_source_executable_timeout_millis:
+ self._credential_source_executable_timeout_millis = (
+ EXECUTABLE_TIMEOUT_MILLIS_DEFAULT
+ )
+ elif (
+ self._credential_source_executable_timeout_millis
+ < EXECUTABLE_TIMEOUT_MILLIS_LOWER_BOUND
+ or self._credential_source_executable_timeout_millis
+ > EXECUTABLE_TIMEOUT_MILLIS_UPPER_BOUND
+ ):
+ raise exceptions.InvalidValue("Timeout must be between 5 and 120 seconds.")
+
+ if self._credential_source_executable_interactive_timeout_millis:
+ if (
+ self._credential_source_executable_interactive_timeout_millis
+ < EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_LOWER_BOUND
+ or self._credential_source_executable_interactive_timeout_millis
+ > EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_UPPER_BOUND
+ ):
+ raise exceptions.InvalidValue(
+ "Interactive timeout must be between 30 seconds and 30 minutes."
+ )
+
+ @_helpers.copy_docstring(external_account.Credentials)
+ def retrieve_subject_token(self, request):
+ self._validate_running_mode()
+
+ # Check output file.
+ if self._credential_source_executable_output_file is not None:
+ try:
+ with open(
+ self._credential_source_executable_output_file, encoding="utf-8"
+ ) as output_file:
+ response = json.load(output_file)
+ except Exception:
+ pass
+ else:
+ try:
+ # If the cached response is expired, _parse_subject_token will raise an error which will be ignored and we will call the executable again.
+ subject_token = self._parse_subject_token(response)
+ if (
+ "expiration_time" not in response
+ ): # Always treat missing expiration_time as expired and proceed to executable run.
+ raise exceptions.RefreshError
+ except (exceptions.MalformedError, exceptions.InvalidValue):
+ raise
+ except exceptions.RefreshError:
+ pass
+ else:
+ return subject_token
+
+ if not _helpers.is_python_3():
+ raise exceptions.RefreshError(
+ "Pluggable auth is only supported for python 3.7+"
+ )
+
+ # Inject env vars.
+ env = os.environ.copy()
+ self._inject_env_variables(env)
+ env["GOOGLE_EXTERNAL_ACCOUNT_REVOKE"] = "0"
+
+ # Run executable.
+ exe_timeout = (
+ self._credential_source_executable_interactive_timeout_millis / 1000
+ if self.interactive
+ else self._credential_source_executable_timeout_millis / 1000
+ )
+ exe_stdin = sys.stdin if self.interactive else None
+ exe_stdout = sys.stdout if self.interactive else subprocess.PIPE
+ exe_stderr = sys.stdout if self.interactive else subprocess.STDOUT
+
+ result = subprocess.run(
+ self._credential_source_executable_command.split(),
+ timeout=exe_timeout,
+ stdin=exe_stdin,
+ stdout=exe_stdout,
+ stderr=exe_stderr,
+ env=env,
+ )
+ if result.returncode != 0:
+ raise exceptions.RefreshError(
+ "Executable exited with non-zero return code {}. Error: {}".format(
+ result.returncode, result.stdout
+ )
+ )
+
+ # Handle executable output.
+ response = json.loads(result.stdout.decode("utf-8")) if result.stdout else None
+ if not response and self._credential_source_executable_output_file is not None:
+ response = json.load(
+ open(self._credential_source_executable_output_file, encoding="utf-8")
+ )
+
+ subject_token = self._parse_subject_token(response)
+ return subject_token
+
+ def revoke(self, request):
+ """Revokes the subject token using the credential_source object.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ Raises:
+            google.auth.exceptions.RefreshError: If the executable revocation
+                did not execute properly.
+
+ """
+ if not self.interactive:
+ raise exceptions.InvalidValue(
+ "Revoke is only enabled under interactive mode."
+ )
+ self._validate_running_mode()
+
+ if not _helpers.is_python_3():
+ raise exceptions.RefreshError(
+ "Pluggable auth is only supported for python 3.7+"
+ )
+
+ # Inject variables
+ env = os.environ.copy()
+ self._inject_env_variables(env)
+ env["GOOGLE_EXTERNAL_ACCOUNT_REVOKE"] = "1"
+
+ # Run executable
+ result = subprocess.run(
+ self._credential_source_executable_command.split(),
+ timeout=self._credential_source_executable_interactive_timeout_millis
+ / 1000,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ env=env,
+ )
+
+ if result.returncode != 0:
+ raise exceptions.RefreshError(
+ "Auth revoke failed on executable. Exit with non-zero return code {}. Error: {}".format(
+ result.returncode, result.stdout
+ )
+ )
+
+ response = json.loads(result.stdout.decode("utf-8"))
+ self._validate_revoke_response(response)
+
+ @property
+ def external_account_id(self):
+ """Returns the external account identifier.
+
+ When service account impersonation is used the identifier is the service
+ account email.
+
+ Without service account impersonation, this returns None, unless it is
+        being used by the Google Cloud CLI, which populates this field.
+ """
+
+ return self.service_account_email or self._tokeninfo_username
+
+ @classmethod
+ def from_info(cls, info, **kwargs):
+ """Creates a Pluggable Credentials instance from parsed external account info.
+
+ Args:
+ info (Mapping[str, str]): The Pluggable external account info in Google
+ format.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.pluggable.Credentials: The constructed
+ credentials.
+
+ Raises:
+ google.auth.exceptions.InvalidValue: For invalid parameters.
+ google.auth.exceptions.MalformedError: For invalid parameters.
+ """
+ return super(Credentials, cls).from_info(info, **kwargs)
+
+ @classmethod
+ def from_file(cls, filename, **kwargs):
+ """Creates an Pluggable Credentials instance from an external account json file.
+
+ Args:
+ filename (str): The path to the Pluggable external account json file.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.pluggable.Credentials: The constructed
+ credentials.
+ """
+ return super(Credentials, cls).from_file(filename, **kwargs)
+
+ def _inject_env_variables(self, env):
+ env["GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE"] = self._audience
+ env["GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE"] = self._subject_token_type
+ env["GOOGLE_EXTERNAL_ACCOUNT_ID"] = self.external_account_id
+ env["GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE"] = "1" if self.interactive else "0"
+
+ if self._service_account_impersonation_url is not None:
+ env[
+ "GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL"
+ ] = self.service_account_email
+ if self._credential_source_executable_output_file is not None:
+ env[
+ "GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE"
+ ] = self._credential_source_executable_output_file
+
+ def _parse_subject_token(self, response):
+ self._validate_response_schema(response)
+ if not response["success"]:
+ if "code" not in response or "message" not in response:
+ raise exceptions.MalformedError(
+ "Error code and message fields are required in the response."
+ )
+ raise exceptions.RefreshError(
+ "Executable returned unsuccessful response: code: {}, message: {}.".format(
+ response["code"], response["message"]
+ )
+ )
+ if "expiration_time" in response and response["expiration_time"] < time.time():
+ raise exceptions.RefreshError(
+ "The token returned by the executable is expired."
+ )
+ if "token_type" not in response:
+ raise exceptions.MalformedError(
+ "The executable response is missing the token_type field."
+ )
+ if (
+ response["token_type"] == "urn:ietf:params:oauth:token-type:jwt"
+ or response["token_type"] == "urn:ietf:params:oauth:token-type:id_token"
+ ): # OIDC
+ return response["id_token"]
+ elif response["token_type"] == "urn:ietf:params:oauth:token-type:saml2": # SAML
+ return response["saml_response"]
+ else:
+ raise exceptions.RefreshError("Executable returned unsupported token type.")
+
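+    # Illustrative example (hypothetical values): a successful OIDC response
+    # accepted by _parse_subject_token above looks like
+    #
+    #     {
+    #         "version": 1,
+    #         "success": true,
+    #         "token_type": "urn:ietf:params:oauth:token-type:id_token",
+    #         "id_token": "<jwt>",
+    #         "expiration_time": 1713000000
+    #     }
+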
+ def _validate_revoke_response(self, response):
+ self._validate_response_schema(response)
+ if not response["success"]:
+ raise exceptions.RefreshError("Revoke failed with unsuccessful response.")
+
+ def _validate_response_schema(self, response):
+ if "version" not in response:
+ raise exceptions.MalformedError(
+ "The executable response is missing the version field."
+ )
+ if response["version"] > EXECUTABLE_SUPPORTED_MAX_VERSION:
+ raise exceptions.RefreshError(
+ "Executable returned unsupported version {}.".format(
+ response["version"]
+ )
+ )
+
+ if "success" not in response:
+ raise exceptions.MalformedError(
+ "The executable response is missing the success field."
+ )
+
+ def _validate_running_mode(self):
+ env_allow_executables = os.environ.get(
+ "GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES"
+ )
+ if env_allow_executables != "1":
+ raise exceptions.MalformedError(
+ "Executables need to be explicitly allowed (set GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES to '1') to run."
+ )
+
+ if self.interactive and not self._credential_source_executable_output_file:
+ raise exceptions.MalformedError(
+ "An output_file must be specified in the credential configuration for interactive mode."
+ )
+
+ if (
+ self.interactive
+ and not self._credential_source_executable_interactive_timeout_millis
+ ):
+ raise exceptions.InvalidOperation(
+ "Interactive mode cannot run without an interactive timeout."
+ )
+
+ if self.interactive and not self.is_workforce_pool:
+ raise exceptions.InvalidValue(
+ "Interactive mode is only enabled for workforce pool."
+ )
+
+ def _create_default_metrics_options(self):
+ metrics_options = super(Credentials, self)._create_default_metrics_options()
+ metrics_options["source"] = "executable"
+ return metrics_options
diff --git a/Lib/site-packages/google/auth/transport/__init__.py b/Lib/site-packages/google/auth/transport/__init__.py
new file mode 100644
index 0000000..724568e
--- /dev/null
+++ b/Lib/site-packages/google/auth/transport/__init__.py
@@ -0,0 +1,103 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport - HTTP client library support.
+
+:mod:`google.auth` is designed to work with various HTTP client libraries such
+as urllib3 and requests. In order to work across these libraries with different
+interfaces some abstraction is needed.
+
+This module provides two interfaces that are implemented by transport adapters
+to support HTTP libraries. :class:`Request` defines the interface expected by
+:mod:`google.auth` to make requests. :class:`Response` defines the interface
+for the return value of :class:`Request`.
+"""
+
+import abc
+import http.client as http_client
+
+DEFAULT_RETRYABLE_STATUS_CODES = (
+ http_client.INTERNAL_SERVER_ERROR,
+ http_client.SERVICE_UNAVAILABLE,
+ http_client.REQUEST_TIMEOUT,
+ http_client.TOO_MANY_REQUESTS,
+)
+"""Sequence[int]: HTTP status codes indicating a request can be retried.
+"""
+
+
+DEFAULT_REFRESH_STATUS_CODES = (http_client.UNAUTHORIZED,)
+"""Sequence[int]: Which HTTP status code indicate that credentials should be
+refreshed.
+"""
+
+DEFAULT_MAX_REFRESH_ATTEMPTS = 2
+"""int: How many times to refresh the credentials and retry a request."""
+
+
+class Response(metaclass=abc.ABCMeta):
+ """HTTP Response data."""
+
+ @abc.abstractproperty
+ def status(self):
+ """int: The HTTP status code."""
+ raise NotImplementedError("status must be implemented.")
+
+ @abc.abstractproperty
+ def headers(self):
+ """Mapping[str, str]: The HTTP response headers."""
+ raise NotImplementedError("headers must be implemented.")
+
+ @abc.abstractproperty
+ def data(self):
+ """bytes: The response body."""
+ raise NotImplementedError("data must be implemented.")
+
+
+class Request(metaclass=abc.ABCMeta):
+ """Interface for a callable that makes HTTP requests.
+
+ Specific transport implementations should provide an implementation of
+ this that adapts their specific request / response API.
+
+ .. automethod:: __call__
+ """
+
+ @abc.abstractmethod
+ def __call__(
+ self, url, method="GET", body=None, headers=None, timeout=None, **kwargs
+ ):
+ """Make an HTTP request.
+
+ Args:
+ url (str): The URI to be requested.
+ method (str): The HTTP method to use for the request. Defaults
+ to 'GET'.
+ body (bytes): The payload / body in HTTP request.
+ headers (Mapping[str, str]): Request headers.
+ timeout (Optional[int]): The number of seconds to wait for a
+ response from the server. If not specified or if None, the
+ transport-specific default timeout will be used.
+            kwargs: Additional arguments passed on to the transport's
+ request method.
+
+ Returns:
+ Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TransportError: If any exception occurred.
+ """
+ # pylint: disable=redundant-returns-doc, missing-raises-doc
+ # (pylint doesn't play well with abstract docstrings.)
+ raise NotImplementedError("__call__ must be implemented.")
diff --git a/Lib/site-packages/google/auth/transport/_aiohttp_requests.py b/Lib/site-packages/google/auth/transport/_aiohttp_requests.py
new file mode 100644
index 0000000..3a8da91
--- /dev/null
+++ b/Lib/site-packages/google/auth/transport/_aiohttp_requests.py
@@ -0,0 +1,390 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport adapter for Async HTTP (aiohttp).
+
+NOTE: This async support is experimental and marked internal. This surface may
+change in minor releases.
+"""
+
+from __future__ import absolute_import
+
+import asyncio
+import functools
+
+import aiohttp # type: ignore
+import urllib3 # type: ignore
+
+from google.auth import exceptions
+from google.auth import transport
+from google.auth.transport import requests
+
+# Timeout can be re-defined depending on async requirement. Currently made 60s more than
+# sync timeout.
+_DEFAULT_TIMEOUT = 180 # in seconds
+
+
+class _CombinedResponse(transport.Response):
+ """
+    In order to more closely resemble the `requests` interface, where raw and
+    decompressed content can both be accessed, this class lazily reads the
+    stream in `transport.Response` so both forms can be used.
+
+    Because the ClientSession is created with auto_decompress=False, the gzip
+    and deflate transfer-encodings are not decoded by aiohttp itself; this
+    wrapper decodes them on demand, giving access to both the raw and the
+    decoded response bodies, mirroring the sync implementation.
+ """
+
+ def __init__(self, response):
+ self._response = response
+ self._raw_content = None
+
+ def _is_compressed(self):
+ headers = self._response.headers
+ return "Content-Encoding" in headers and (
+ headers["Content-Encoding"] == "gzip"
+ or headers["Content-Encoding"] == "deflate"
+ )
+
+ @property
+ def status(self):
+ return self._response.status
+
+ @property
+ def headers(self):
+ return self._response.headers
+
+ @property
+ def data(self):
+ return self._response.content
+
+ async def raw_content(self):
+ if self._raw_content is None:
+ self._raw_content = await self._response.content.read()
+ return self._raw_content
+
+ async def content(self):
+ # Load raw_content if necessary
+ await self.raw_content()
+ if self._is_compressed():
+ decoder = urllib3.response.MultiDecoder(
+ self._response.headers["Content-Encoding"]
+ )
+ decompressed = decoder.decompress(self._raw_content)
+ return decompressed
+
+ return self._raw_content
+
+
+class _Response(transport.Response):
+ """
+    aiohttp transport response adapter.
+
+    Args:
+        response (aiohttp.ClientResponse): The raw aiohttp response.
+ """
+
+ def __init__(self, response):
+ self._response = response
+
+ @property
+ def status(self):
+ return self._response.status
+
+ @property
+ def headers(self):
+ return self._response.headers
+
+ @property
+ def data(self):
+ return self._response.content
+
+
+class Request(transport.Request):
+ """Requests request adapter.
+
+ This class is used internally for making requests using asyncio transports
+ in a consistent way. If you use :class:`AuthorizedSession` you do not need
+ to construct or use this class directly.
+
+ This class can be useful if you want to manually refresh a
+ :class:`~google.auth.credentials.Credentials` instance::
+
+        import google.auth.transport._aiohttp_requests
+
+        request = google.auth.transport._aiohttp_requests.Request()
+
+ credentials.refresh(request)
+
+ Args:
+        session (aiohttp.ClientSession): An instance of
+            :class:`aiohttp.ClientSession` used to make HTTP requests. If not
+            specified, a session will be created.
+
+ .. automethod:: __call__
+ """
+
+ def __init__(self, session=None):
+ # TODO: Use auto_decompress property for aiohttp 3.7+
+ if session is not None and session._auto_decompress:
+ raise exceptions.InvalidOperation(
+ "Client sessions with auto_decompress=True are not supported."
+ )
+ self.session = session
+
+ async def __call__(
+ self,
+ url,
+ method="GET",
+ body=None,
+ headers=None,
+ timeout=_DEFAULT_TIMEOUT,
+ **kwargs,
+ ):
+ """
+ Make an HTTP request using aiohttp.
+
+ Args:
+ url (str): The URL to be requested.
+ method (Optional[str]):
+ The HTTP method to use for the request. Defaults to 'GET'.
+ body (Optional[bytes]):
+ The payload or body in HTTP request.
+ headers (Optional[Mapping[str, str]]):
+ Request headers.
+            timeout (Optional[int]): The number of seconds to wait for a
+                response from the server. If not specified or if None, the
+                transport default timeout will be used.
+            kwargs: Additional arguments passed through to the underlying
+                aiohttp :meth:`~aiohttp.ClientSession.request` method.
+
+ Returns:
+ google.auth.transport.Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TransportError: If any exception occurred.
+ """
+
+ try:
+ if self.session is None: # pragma: NO COVER
+ self.session = aiohttp.ClientSession(
+ auto_decompress=False
+ ) # pragma: NO COVER
+ requests._LOGGER.debug("Making request: %s %s", method, url)
+ response = await self.session.request(
+ method, url, data=body, headers=headers, timeout=timeout, **kwargs
+ )
+ return _CombinedResponse(response)
+
+ except aiohttp.ClientError as caught_exc:
+ new_exc = exceptions.TransportError(caught_exc)
+ raise new_exc from caught_exc
+
+ except asyncio.TimeoutError as caught_exc:
+ new_exc = exceptions.TransportError(caught_exc)
+ raise new_exc from caught_exc
+
+
+class AuthorizedSession(aiohttp.ClientSession):
+ """This is an async implementation of the Authorized Session class. We utilize an
+ aiohttp transport instance, and the interface mirrors the google.auth.transport.requests
+ Authorized Session class, except for the change in the transport used in the async use case.
+
+ A Requests Session class with credentials.
+
+ This class is used to perform requests to API endpoints that require
+ authorization::
+
+        from google.auth.transport import _aiohttp_requests
+
+        async with _aiohttp_requests.AuthorizedSession(credentials) as authed_session:
+ response = await authed_session.request(
+ 'GET', 'https://www.googleapis.com/storage/v1/b')
+
+ The underlying :meth:`request` implementation handles adding the
+ credentials' headers to the request and refreshing credentials as needed.
+
+ Args:
+ credentials (google.auth._credentials_async.Credentials):
+ The credentials to add to the request.
+ refresh_status_codes (Sequence[int]): Which HTTP status codes indicate
+ that credentials should be refreshed and the request should be
+ retried.
+ max_refresh_attempts (int): The maximum number of times to attempt to
+ refresh the credentials and retry the request.
+ refresh_timeout (Optional[int]): The timeout value in seconds for
+ credential refresh HTTP requests.
+        auth_request (google.auth.transport._aiohttp_requests.Request):
+            (Optional) An instance of
+            :class:`~google.auth.transport._aiohttp_requests.Request` used when
+            refreshing credentials. If not passed,
+            an instance of :class:`~google.auth.transport._aiohttp_requests.Request`
+            is created.
+        kwargs: Additional arguments passed through to the underlying
+            :class:`aiohttp.ClientSession` constructor.
+ """
+
+ def __init__(
+ self,
+ credentials,
+ refresh_status_codes=transport.DEFAULT_REFRESH_STATUS_CODES,
+ max_refresh_attempts=transport.DEFAULT_MAX_REFRESH_ATTEMPTS,
+ refresh_timeout=None,
+ auth_request=None,
+ auto_decompress=False,
+ **kwargs,
+ ):
+ super(AuthorizedSession, self).__init__(**kwargs)
+ self.credentials = credentials
+ self._refresh_status_codes = refresh_status_codes
+ self._max_refresh_attempts = max_refresh_attempts
+ self._refresh_timeout = refresh_timeout
+ self._is_mtls = False
+ self._auth_request = auth_request
+ self._auth_request_session = None
+ self._loop = asyncio.get_event_loop()
+ self._refresh_lock = asyncio.Lock()
+ self._auto_decompress = auto_decompress
+
+ async def request(
+ self,
+ method,
+ url,
+ data=None,
+ headers=None,
+ max_allowed_time=None,
+ timeout=_DEFAULT_TIMEOUT,
+ auto_decompress=False,
+ **kwargs,
+ ):
+
+ """Implementation of Authorized Session aiohttp request.
+
+ Args:
+ method (str):
+ The http request method used (e.g. GET, PUT, DELETE)
+ url (str):
+ The url at which the http request is sent.
+ data (Optional[dict]): Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the Request.
+ headers (Optional[dict]): Dictionary of HTTP Headers to send with the
+ Request.
+ timeout (Optional[Union[float, aiohttp.ClientTimeout]]):
+ The amount of time in seconds to wait for the server response
+ with each individual request. Can also be passed as an
+ ``aiohttp.ClientTimeout`` object.
+ max_allowed_time (Optional[float]):
+ If the method runs longer than this, a ``Timeout`` exception is
+ automatically raised. Unlike the ``timeout`` parameter, this
+ value applies to the total method execution time, even if
+ multiple requests are made under the hood.
+
+ Mind that it is not guaranteed that the timeout error is raised
+ at ``max_allowed_time``. It might take longer, for example, if
+ an underlying request takes a lot of time, but the request
+ itself does not timeout, e.g. if a large file is being
+                transmitted. The timeout error will be raised after such a
+                request completes.
+ """
+        # Headers sometimes come in as bytes, which is not the expected
+        # behavior: the resumable media libraries expect str header values in
+        # some cases, so decode any bytes values to str first.
+ if headers:
+ for key in headers.keys():
+ if type(headers[key]) is bytes:
+ headers[key] = headers[key].decode("utf-8")
+
+ async with aiohttp.ClientSession(
+ auto_decompress=self._auto_decompress
+ ) as self._auth_request_session:
+ auth_request = Request(self._auth_request_session)
+ self._auth_request = auth_request
+
+ # Use a kwarg for this instead of an attribute to maintain
+ # thread-safety.
+ _credential_refresh_attempt = kwargs.pop("_credential_refresh_attempt", 0)
+ # Make a copy of the headers. They will be modified by the credentials
+ # and we want to pass the original headers if we recurse.
+ request_headers = headers.copy() if headers is not None else {}
+
+ # Do not apply the timeout unconditionally in order to not override the
+ # _auth_request's default timeout.
+ auth_request = (
+ self._auth_request
+ if timeout is None
+ else functools.partial(self._auth_request, timeout=timeout)
+ )
+
+ remaining_time = max_allowed_time
+
+ with requests.TimeoutGuard(remaining_time, asyncio.TimeoutError) as guard:
+ await self.credentials.before_request(
+ auth_request, method, url, request_headers
+ )
+
+ with requests.TimeoutGuard(remaining_time, asyncio.TimeoutError) as guard:
+ response = await super(AuthorizedSession, self).request(
+ method,
+ url,
+ data=data,
+ headers=request_headers,
+ timeout=timeout,
+ **kwargs,
+ )
+
+ remaining_time = guard.remaining_timeout
+
+ if (
+ response.status in self._refresh_status_codes
+ and _credential_refresh_attempt < self._max_refresh_attempts
+ ):
+
+ requests._LOGGER.info(
+ "Refreshing credentials due to a %s response. Attempt %s/%s.",
+ response.status,
+ _credential_refresh_attempt + 1,
+ self._max_refresh_attempts,
+ )
+
+ # Do not apply the timeout unconditionally in order to not override the
+ # _auth_request's default timeout.
+ auth_request = (
+ self._auth_request
+ if timeout is None
+ else functools.partial(self._auth_request, timeout=timeout)
+ )
+
+ with requests.TimeoutGuard(
+ remaining_time, asyncio.TimeoutError
+ ) as guard:
+ async with self._refresh_lock:
+ await self._loop.run_in_executor(
+ None, self.credentials.refresh, auth_request
+ )
+
+ remaining_time = guard.remaining_timeout
+
+ return await self.request(
+ method,
+ url,
+ data=data,
+ headers=headers,
+ max_allowed_time=remaining_time,
+ timeout=timeout,
+ _credential_refresh_attempt=_credential_refresh_attempt + 1,
+ **kwargs,
+ )
+
+ return response
diff --git a/Lib/site-packages/google/auth/transport/_custom_tls_signer.py b/Lib/site-packages/google/auth/transport/_custom_tls_signer.py
new file mode 100644
index 0000000..57a563d
--- /dev/null
+++ b/Lib/site-packages/google/auth/transport/_custom_tls_signer.py
@@ -0,0 +1,271 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Code for configuring client side TLS to offload the signing operation to
+signing libraries.
+"""
+
+import ctypes
+import json
+import logging
+import os
+import sys
+
+import cffi # type: ignore
+
+from google.auth import exceptions
+
+_LOGGER = logging.getLogger(__name__)
+
+# C++ offload lib requires google-auth lib to provide the following callback:
+# using SignFunc = int (*)(unsigned char *sig, size_t *sig_len,
+# const unsigned char *tbs, size_t tbs_len)
+# The bytes to be signed and the length are provided via `tbs` and `tbs_len`,
+# the callback computes the signature, and writes the signature and its length
+# into `sig` and `sig_len`.
+# If the signing is successful, the callback returns 1, otherwise it returns 0.
+SIGN_CALLBACK_CTYPE = ctypes.CFUNCTYPE(
+ ctypes.c_int, # return type
+ ctypes.POINTER(ctypes.c_ubyte), # sig
+ ctypes.POINTER(ctypes.c_size_t), # sig_len
+ ctypes.POINTER(ctypes.c_ubyte), # tbs
+ ctypes.c_size_t, # tbs_len
+)
+
+
+# Cast SSL_CTX* to void*
+def _cast_ssl_ctx_to_void_p(ssl_ctx):
+ return ctypes.cast(int(cffi.FFI().cast("intptr_t", ssl_ctx)), ctypes.c_void_p)
+
+
+# Load offload library and set up the function types.
+def load_offload_lib(offload_lib_path):
+ _LOGGER.debug("loading offload library from %s", offload_lib_path)
+
+ # winmode parameter is only available for python 3.8+.
+ lib = (
+ ctypes.CDLL(offload_lib_path, winmode=0)
+ if sys.version_info >= (3, 8) and os.name == "nt"
+ else ctypes.CDLL(offload_lib_path)
+ )
+
+ # Set up types for:
+ # int ConfigureSslContext(SignFunc sign_func, const char *cert, SSL_CTX *ctx)
+ lib.ConfigureSslContext.argtypes = [
+ SIGN_CALLBACK_CTYPE,
+ ctypes.c_char_p,
+ ctypes.c_void_p,
+ ]
+ lib.ConfigureSslContext.restype = ctypes.c_int
+
+ return lib
+
+
+# Load signer library and set up the function types.
+# See: https://github.com/googleapis/enterprise-certificate-proxy/blob/main/cshared/main.go
+def load_signer_lib(signer_lib_path):
+ _LOGGER.debug("loading signer library from %s", signer_lib_path)
+
+ # winmode parameter is only available for python 3.8+.
+ lib = (
+ ctypes.CDLL(signer_lib_path, winmode=0)
+ if sys.version_info >= (3, 8) and os.name == "nt"
+ else ctypes.CDLL(signer_lib_path)
+ )
+
+ # Set up types for:
+ # func GetCertPemForPython(configFilePath *C.char, certHolder *byte, certHolderLen int)
+ lib.GetCertPemForPython.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int]
+ # Returns: certLen
+ lib.GetCertPemForPython.restype = ctypes.c_int
+
+ # Set up types for:
+ # func SignForPython(configFilePath *C.char, digest *byte, digestLen int,
+ # sigHolder *byte, sigHolderLen int)
+ lib.SignForPython.argtypes = [
+ ctypes.c_char_p,
+ ctypes.c_char_p,
+ ctypes.c_int,
+ ctypes.c_char_p,
+ ctypes.c_int,
+ ]
+ # Returns: the signature length
+ lib.SignForPython.restype = ctypes.c_int
+
+ return lib
+
+
+def load_provider_lib(provider_lib_path):
+ _LOGGER.debug("loading provider library from %s", provider_lib_path)
+
+ # winmode parameter is only available for python 3.8+.
+ lib = (
+ ctypes.CDLL(provider_lib_path, winmode=0)
+ if sys.version_info >= (3, 8) and os.name == "nt"
+ else ctypes.CDLL(provider_lib_path)
+ )
+
+ lib.ECP_attach_to_ctx.argtypes = [ctypes.c_void_p, ctypes.c_char_p]
+ lib.ECP_attach_to_ctx.restype = ctypes.c_int
+
+ return lib
+
+
+# Computes SHA256 hash.
+def _compute_sha256_digest(to_be_signed, to_be_signed_len):
+ from cryptography.hazmat.primitives import hashes
+
+ data = ctypes.string_at(to_be_signed, to_be_signed_len)
+ hash = hashes.Hash(hashes.SHA256())
+ hash.update(data)
+ return hash.finalize()
+
+
+# Create the signing callback. The actual signing work is done by the
+# `SignForPython` method from the signer lib.
+def get_sign_callback(signer_lib, config_file_path):
+ def sign_callback(sig, sig_len, tbs, tbs_len):
+ _LOGGER.debug("calling sign callback...")
+
+ digest = _compute_sha256_digest(tbs, tbs_len)
+ digestArray = ctypes.c_char * len(digest)
+
+        # Reserve 2000 bytes for the signature; this should be more than
+        # enough. An RSA signature is 256 bytes, an EC signature is 70~72.
+ sig_holder_len = 2000
+ sig_holder = ctypes.create_string_buffer(sig_holder_len)
+
+ signature_len = signer_lib.SignForPython(
+ config_file_path.encode(), # configFilePath
+ digestArray.from_buffer(bytearray(digest)), # digest
+ len(digest), # digestLen
+ sig_holder, # sigHolder
+ sig_holder_len, # sigHolderLen
+ )
+
+ if signature_len == 0:
+ # signing failed, return 0
+ return 0
+
+ sig_len[0] = signature_len
+ bs = bytearray(sig_holder)
+ for i in range(signature_len):
+ sig[i] = bs[i]
+
+ return 1
+
+ return SIGN_CALLBACK_CTYPE(sign_callback)
+
+
+# Obtain the certificate bytes by calling the `GetCertPemForPython` method from
+# the signer lib. The method is called twice, the first time is to compute the
+# cert length, then we create a buffer to hold the cert, and call it again to
+# fill the buffer.
+def get_cert(signer_lib, config_file_path):
+ # First call to calculate the cert length
+ cert_len = signer_lib.GetCertPemForPython(
+ config_file_path.encode(), # configFilePath
+ None, # certHolder
+ 0, # certHolderLen
+ )
+ if cert_len == 0:
+ raise exceptions.MutualTLSChannelError("failed to get certificate")
+
+ # Then we create an array to hold the cert, and call again to fill the cert
+ cert_holder = ctypes.create_string_buffer(cert_len)
+ signer_lib.GetCertPemForPython(
+ config_file_path.encode(), # configFilePath
+ cert_holder, # certHolder
+ cert_len, # certHolderLen
+ )
+ return bytes(cert_holder)
+
+
+class CustomTlsSigner(object):
+ def __init__(self, enterprise_cert_file_path):
+ """
+ This class loads the offload and signer library, and calls APIs from
+ these libraries to obtain the cert and a signing callback, and attach
+ them to SSL context. The cert and the signing callback will be used
+ for client authentication in TLS handshake.
+
+ Args:
+            enterprise_cert_file_path (str): the path to an enterprise cert JSON
+ file. The file should contain the following field:
+
+ {
+ "libs": {
+ "ecp_client": "...",
+ "tls_offload": "..."
+ }
+ }
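+
+        A typical flow, as a rough sketch (``ssl_ctx`` stands for a
+        pyOpenSSL-backed urllib3 SSL context, e.g. one created via
+        ``create_urllib3_context()`` after ``inject_into_urllib3()``)::
+
+            signer = CustomTlsSigner("/path/to/enterprise_cert.json")
+            signer.load_libraries()
+            signer.attach_to_ssl_context(ssl_ctx)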
+ """
+ self._enterprise_cert_file_path = enterprise_cert_file_path
+ self._cert = None
+ self._sign_callback = None
+ self._provider_lib = None
+
+ def load_libraries(self):
+ with open(self._enterprise_cert_file_path, "r") as f:
+ enterprise_cert_json = json.load(f)
+ libs = enterprise_cert_json.get("libs", {})
+
+ signer_library = libs.get("ecp_client", None)
+ offload_library = libs.get("tls_offload", None)
+ provider_library = libs.get("ecp_provider", None)
+
+        # Using the newer provider implementation. This is mutually exclusive
+        # with the offload implementation.
+ if provider_library:
+ self._provider_lib = load_provider_lib(provider_library)
+ return
+
+ # Using old offload implementation
+ if offload_library and signer_library:
+ self._offload_lib = load_offload_lib(offload_library)
+ self._signer_lib = load_signer_lib(signer_library)
+ self.set_up_custom_key()
+ return
+
+ raise exceptions.MutualTLSChannelError("enterprise cert file is invalid")
+
+ def set_up_custom_key(self):
+        # We need to keep references to the cert and sign callback so they
+        # won't be garbage collected; otherwise the signer lib will crash
+        # when using them.
+ self._cert = get_cert(self._signer_lib, self._enterprise_cert_file_path)
+ self._sign_callback = get_sign_callback(
+ self._signer_lib, self._enterprise_cert_file_path
+ )
+
+ def attach_to_ssl_context(self, ctx):
+ if self._provider_lib:
+ if not self._provider_lib.ECP_attach_to_ctx(
+ _cast_ssl_ctx_to_void_p(ctx._ctx._context),
+ self._enterprise_cert_file_path.encode("ascii"),
+ ):
+ raise exceptions.MutualTLSChannelError(
+ "failed to configure ECP Provider SSL context"
+ )
+ elif self._offload_lib and self._signer_lib:
+ if not self._offload_lib.ConfigureSslContext(
+ self._sign_callback,
+ ctypes.c_char_p(self._cert),
+ _cast_ssl_ctx_to_void_p(ctx._ctx._context),
+ ):
+ raise exceptions.MutualTLSChannelError(
+ "failed to configure ECP Offload SSL context"
+ )
+ else:
+ raise exceptions.MutualTLSChannelError("Invalid ECP configuration.")
diff --git a/Lib/site-packages/google/auth/transport/_http_client.py b/Lib/site-packages/google/auth/transport/_http_client.py
new file mode 100644
index 0000000..cec0ab7
--- /dev/null
+++ b/Lib/site-packages/google/auth/transport/_http_client.py
@@ -0,0 +1,113 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport adapter for http.client, for internal use only."""
+
+import http.client as http_client
+import logging
+import socket
+import urllib
+
+from google.auth import exceptions
+from google.auth import transport
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class Response(transport.Response):
+ """http.client transport response adapter.
+
+ Args:
+ response (http.client.HTTPResponse): The raw http client response.
+ """
+
+ def __init__(self, response):
+ self._status = response.status
+ self._headers = {key.lower(): value for key, value in response.getheaders()}
+ self._data = response.read()
+
+ @property
+ def status(self):
+ return self._status
+
+ @property
+ def headers(self):
+ return self._headers
+
+ @property
+ def data(self):
+ return self._data
+
+
+class Request(transport.Request):
+ """http.client transport request adapter."""
+
+ def __call__(
+ self, url, method="GET", body=None, headers=None, timeout=None, **kwargs
+ ):
+ """Make an HTTP request using http.client.
+
+ Args:
+ url (str): The URI to be requested.
+ method (str): The HTTP method to use for the request. Defaults
+ to 'GET'.
+ body (bytes): The payload / body in HTTP request.
+ headers (Mapping): Request headers.
+            timeout (Optional[int]): The number of seconds to wait for a
+ response from the server. If not specified or if None, the
+ socket global default timeout will be used.
+            kwargs: Additional arguments passed through to the underlying
+ :meth:`~http.client.HTTPConnection.request` method.
+
+ Returns:
+ Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TransportError: If any exception occurred.
+ """
+ # socket._GLOBAL_DEFAULT_TIMEOUT is the default in http.client.
+ if timeout is None:
+ timeout = socket._GLOBAL_DEFAULT_TIMEOUT
+
+ # http.client doesn't allow None as the headers argument.
+ if headers is None:
+ headers = {}
+
+ # http.client needs the host and path parts specified separately.
+ parts = urllib.parse.urlsplit(url)
+ path = urllib.parse.urlunsplit(
+ ("", "", parts.path, parts.query, parts.fragment)
+ )
+
+ if parts.scheme != "http":
+ raise exceptions.TransportError(
+ "http.client transport only supports the http scheme, {}"
+ "was specified".format(parts.scheme)
+ )
+
+ connection = http_client.HTTPConnection(parts.netloc, timeout=timeout)
+
+ try:
+ _LOGGER.debug("Making request: %s %s", method, url)
+
+ connection.request(method, path, body=body, headers=headers, **kwargs)
+ response = connection.getresponse()
+ return Response(response)
+
+ except (http_client.HTTPException, socket.error) as caught_exc:
+ new_exc = exceptions.TransportError(caught_exc)
+ raise new_exc from caught_exc
+
+ finally:
+ connection.close()
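+
+
+# Example usage, as a rough sketch (this transport is internal and supports
+# plain ``http://`` URLs only):
+#
+#     request = Request()
+#     response = request("http://example.com/", method="GET")
+#     print(response.status, response.data[:20])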
diff --git a/Lib/site-packages/google/auth/transport/_mtls_helper.py b/Lib/site-packages/google/auth/transport/_mtls_helper.py
new file mode 100644
index 0000000..1b9b9c2
--- /dev/null
+++ b/Lib/site-packages/google/auth/transport/_mtls_helper.py
@@ -0,0 +1,252 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper functions for getting mTLS cert and key."""
+
+import json
+import logging
+from os import path
+import re
+import subprocess
+
+from google.auth import exceptions
+
+CONTEXT_AWARE_METADATA_PATH = "~/.secureConnect/context_aware_metadata.json"
+_CERT_PROVIDER_COMMAND = "cert_provider_command"
+_CERT_REGEX = re.compile(
+ b"-----BEGIN CERTIFICATE-----.+-----END CERTIFICATE-----\r?\n?", re.DOTALL
+)
+
+# Support various formats of key files, e.g.
+# "-----BEGIN PRIVATE KEY-----...",
+# "-----BEGIN EC PRIVATE KEY-----...",
+# "-----BEGIN RSA PRIVATE KEY-----..."
+# "-----BEGIN ENCRYPTED PRIVATE KEY-----"
+_KEY_REGEX = re.compile(
+ b"-----BEGIN [A-Z ]*PRIVATE KEY-----.+-----END [A-Z ]*PRIVATE KEY-----\r?\n?",
+ re.DOTALL,
+)
+
+_LOGGER = logging.getLogger(__name__)
+
+
+_PASSPHRASE_REGEX = re.compile(
+ b"-----BEGIN PASSPHRASE-----(.+)-----END PASSPHRASE-----", re.DOTALL
+)
+
+
+def _check_dca_metadata_path(metadata_path):
+ """Checks for context aware metadata. If it exists, returns the absolute path;
+ otherwise returns None.
+
+ Args:
+ metadata_path (str): context aware metadata path.
+
+ Returns:
+ str: absolute path if exists and None otherwise.
+ """
+ metadata_path = path.expanduser(metadata_path)
+ if not path.exists(metadata_path):
+ _LOGGER.debug("%s is not found, skip client SSL authentication.", metadata_path)
+ return None
+ return metadata_path
+
+
+def _read_dca_metadata_file(metadata_path):
+ """Loads context aware metadata from the given path.
+
+ Args:
+ metadata_path (str): context aware metadata path.
+
+ Returns:
+ Dict[str, str]: The metadata.
+
+ Raises:
+ google.auth.exceptions.ClientCertError: If failed to parse metadata as JSON.
+ """
+ try:
+ with open(metadata_path) as f:
+ metadata = json.load(f)
+ except ValueError as caught_exc:
+ new_exc = exceptions.ClientCertError(caught_exc)
+ raise new_exc from caught_exc
+
+ return metadata
+
+
+def _run_cert_provider_command(command, expect_encrypted_key=False):
+ """Run the provided command, and return client side mTLS cert, key and
+ passphrase.
+
+ Args:
+ command (List[str]): cert provider command.
+ expect_encrypted_key (bool): If encrypted private key is expected.
+
+ Returns:
+ Tuple[bytes, bytes, bytes]: client certificate bytes in PEM format, key
+ bytes in PEM format and passphrase bytes.
+
+ Raises:
+        google.auth.exceptions.ClientCertError: if a problem occurs when running
+ the cert provider command or generating cert, key and passphrase.
+ """
+ try:
+ process = subprocess.Popen(
+ command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+ )
+ stdout, stderr = process.communicate()
+ except OSError as caught_exc:
+ new_exc = exceptions.ClientCertError(caught_exc)
+ raise new_exc from caught_exc
+
+ # Check cert provider command execution error.
+ if process.returncode != 0:
+ raise exceptions.ClientCertError(
+ "Cert provider command returns non-zero status code %s" % process.returncode
+ )
+
+ # Extract certificate (chain), key and passphrase.
+ cert_match = re.findall(_CERT_REGEX, stdout)
+ if len(cert_match) != 1:
+ raise exceptions.ClientCertError("Client SSL certificate is missing or invalid")
+ key_match = re.findall(_KEY_REGEX, stdout)
+ if len(key_match) != 1:
+ raise exceptions.ClientCertError("Client SSL key is missing or invalid")
+ passphrase_match = re.findall(_PASSPHRASE_REGEX, stdout)
+
+ if expect_encrypted_key:
+ if len(passphrase_match) != 1:
+ raise exceptions.ClientCertError("Passphrase is missing or invalid")
+ if b"ENCRYPTED" not in key_match[0]:
+ raise exceptions.ClientCertError("Encrypted private key is expected")
+ return cert_match[0], key_match[0], passphrase_match[0].strip()
+
+ if b"ENCRYPTED" in key_match[0]:
+ raise exceptions.ClientCertError("Encrypted private key is not expected")
+ if len(passphrase_match) > 0:
+ raise exceptions.ClientCertError("Passphrase is not expected")
+ return cert_match[0], key_match[0], None
+
+
+def get_client_ssl_credentials(
+ generate_encrypted_key=False,
+ context_aware_metadata_path=CONTEXT_AWARE_METADATA_PATH,
+):
+ """Returns the client side certificate, private key and passphrase.
+
+ Args:
+ generate_encrypted_key (bool): If set to True, encrypted private key
+ and passphrase will be generated; otherwise, unencrypted private key
+ will be generated and passphrase will be None.
+ context_aware_metadata_path (str): The context_aware_metadata.json file path.
+
+ Returns:
+ Tuple[bool, bytes, bytes, bytes]:
+ A boolean indicating if cert, key and passphrase are obtained, the
+ cert bytes and key bytes both in PEM format, and passphrase bytes.
+
+ Raises:
+        google.auth.exceptions.ClientCertError: if a problem occurs when getting
+ the cert, key and passphrase.
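+
+    For example, a rough sketch (this assumes a valid context aware metadata
+    file exists on the device)::
+
+        has_cert, cert, key, passphrase = get_client_ssl_credentials()
+        if has_cert:
+            # cert and key are PEM bytes; passphrase is None here because
+            # generate_encrypted_key defaults to False.
+            print(cert.splitlines()[0])  # b'-----BEGIN CERTIFICATE-----'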
+ """
+ metadata_path = _check_dca_metadata_path(context_aware_metadata_path)
+
+ if metadata_path:
+ metadata_json = _read_dca_metadata_file(metadata_path)
+
+ if _CERT_PROVIDER_COMMAND not in metadata_json:
+ raise exceptions.ClientCertError("Cert provider command is not found")
+
+ command = metadata_json[_CERT_PROVIDER_COMMAND]
+
+ if generate_encrypted_key and "--with_passphrase" not in command:
+ command.append("--with_passphrase")
+
+ # Execute the command.
+ cert, key, passphrase = _run_cert_provider_command(
+ command, expect_encrypted_key=generate_encrypted_key
+ )
+ return True, cert, key, passphrase
+
+ return False, None, None, None
+
+
+def get_client_cert_and_key(client_cert_callback=None):
+ """Returns the client side certificate and private key. The function first
+ tries to get certificate and key from client_cert_callback; if the callback
+ is None or doesn't provide certificate and key, the function tries application
+ default SSL credentials.
+
+ Args:
+ client_cert_callback (Optional[Callable[[], (bytes, bytes)]]): An
+ optional callback which returns client certificate bytes and private
+ key bytes both in PEM format.
+
+ Returns:
+ Tuple[bool, bytes, bytes]:
+ A boolean indicating if cert and key are obtained, the cert bytes
+ and key bytes both in PEM format.
+
+ Raises:
+        google.auth.exceptions.ClientCertError: if a problem occurs when getting
+ the cert and key.
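+
+    For example, a rough sketch using a hypothetical callback::
+
+        def my_callback():
+            # load PEM bytes for the cert and the key elsewhere
+            return cert_bytes, key_bytes
+
+        has_cert, cert, key = get_client_cert_and_key(my_callback)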
+ """
+ if client_cert_callback:
+ cert, key = client_cert_callback()
+ return True, cert, key
+
+ has_cert, cert, key, _ = get_client_ssl_credentials(generate_encrypted_key=False)
+ return has_cert, cert, key
+
+
+def decrypt_private_key(key, passphrase):
+ """A helper function to decrypt the private key with the given passphrase.
+ google-auth library doesn't support passphrase protected private key for
+ mutual TLS channel. This helper function can be used to decrypt the
+    passphrase protected private key in order to establish a mutual TLS channel.
+
+ For example, if you have a function which produces client cert, passphrase
+ protected private key and passphrase, you can convert it to a client cert
+ callback function accepted by google-auth::
+
+ from google.auth.transport import _mtls_helper
+
+ def your_client_cert_function():
+ return cert, encrypted_key, passphrase
+
+ # callback accepted by google-auth for mutual TLS channel.
+ def client_cert_callback():
+ cert, encrypted_key, passphrase = your_client_cert_function()
+ decrypted_key = _mtls_helper.decrypt_private_key(encrypted_key,
+ passphrase)
+ return cert, decrypted_key
+
+ Args:
+ key (bytes): The private key bytes in PEM format.
+ passphrase (bytes): The passphrase bytes.
+
+ Returns:
+ bytes: The decrypted private key in PEM format.
+
+ Raises:
+ ImportError: If pyOpenSSL is not installed.
+ OpenSSL.crypto.Error: If there is any problem decrypting the private key.
+ """
+ from OpenSSL import crypto
+
+ # First convert encrypted_key_bytes to PKey object
+ pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key, passphrase=passphrase)
+
+ # Then dump the decrypted key bytes
+ return crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)
diff --git a/Lib/site-packages/google/auth/transport/grpc.py b/Lib/site-packages/google/auth/transport/grpc.py
new file mode 100644
index 0000000..9a81797
--- /dev/null
+++ b/Lib/site-packages/google/auth/transport/grpc.py
@@ -0,0 +1,343 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Authorization support for gRPC."""
+
+from __future__ import absolute_import
+
+import logging
+import os
+
+from google.auth import environment_vars
+from google.auth import exceptions
+from google.auth.transport import _mtls_helper
+from google.oauth2 import service_account
+
+try:
+ import grpc # type: ignore
+except ImportError as caught_exc: # pragma: NO COVER
+ raise ImportError(
+ "gRPC is not installed from please install the grpcio package to use the gRPC transport."
+ ) from caught_exc
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class AuthMetadataPlugin(grpc.AuthMetadataPlugin):
+ """A `gRPC AuthMetadataPlugin`_ that inserts the credentials into each
+ request.
+
+ .. _gRPC AuthMetadataPlugin:
+ http://www.grpc.io/grpc/python/grpc.html#grpc.AuthMetadataPlugin
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials to
+ add to requests.
+ request (google.auth.transport.Request): A HTTP transport request
+ object used to refresh credentials as needed.
+ default_host (Optional[str]): A host like "pubsub.googleapis.com".
+ This is used when a self-signed JWT is created from service
+ account credentials.
+ """
+
+ def __init__(self, credentials, request, default_host=None):
+ # pylint: disable=no-value-for-parameter
+ # pylint doesn't realize that the super method takes no arguments
+ # because this class is the same name as the superclass.
+ super(AuthMetadataPlugin, self).__init__()
+ self._credentials = credentials
+ self._request = request
+ self._default_host = default_host
+
+ def _get_authorization_headers(self, context):
+ """Gets the authorization headers for a request.
+
+ Returns:
+ Sequence[Tuple[str, str]]: A list of request headers (key, value)
+ to add to the request.
+ """
+ headers = {}
+
+ # https://google.aip.dev/auth/4111
+ # Attempt to use self-signed JWTs when a service account is used.
+ # A default host must be explicitly provided since it cannot always
+ # be determined from the context.service_url.
+ if isinstance(self._credentials, service_account.Credentials):
+ self._credentials._create_self_signed_jwt(
+ "https://{}/".format(self._default_host) if self._default_host else None
+ )
+
+ self._credentials.before_request(
+ self._request, context.method_name, context.service_url, headers
+ )
+
+ return list(headers.items())
+
+ def __call__(self, context, callback):
+ """Passes authorization metadata into the given callback.
+
+ Args:
+ context (grpc.AuthMetadataContext): The RPC context.
+ callback (grpc.AuthMetadataPluginCallback): The callback that will
+ be invoked to pass in the authorization metadata.
+ """
+ callback(self._get_authorization_headers(context), None)
+
+
+def secure_authorized_channel(
+ credentials,
+ request,
+ target,
+ ssl_credentials=None,
+ client_cert_callback=None,
+ **kwargs
+):
+ """Creates a secure authorized gRPC channel.
+
+ This creates a channel with SSL and :class:`AuthMetadataPlugin`. This
+ channel can be used to create a stub that can make authorized requests.
+ Users can configure client certificate or rely on device certificates to
+ establish a mutual TLS channel, if the `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ variable is explicitly set to `true`.
+
+ Example::
+
+ import google.auth
+ import google.auth.transport.grpc
+ import google.auth.transport.requests
+ from google.cloud.speech.v1 import cloud_speech_pb2
+
+ # Get credentials.
+ credentials, _ = google.auth.default()
+
+ # Get an HTTP request function to refresh credentials.
+ request = google.auth.transport.requests.Request()
+
+ # Create a channel.
+ channel = google.auth.transport.grpc.secure_authorized_channel(
+        credentials, request, regular_endpoint,
+ ssl_credentials=grpc.ssl_channel_credentials())
+
+ # Use the channel to create a stub.
+ cloud_speech.create_Speech_stub(channel)
+
+ Usage:
+
+ There are actually a couple of options to create a channel, depending on if
+ you want to create a regular or mutual TLS channel.
+
+ First let's list the endpoints (regular vs mutual TLS) to choose from::
+
+ regular_endpoint = 'speech.googleapis.com:443'
+ mtls_endpoint = 'speech.mtls.googleapis.com:443'
+
+ Option 1: create a regular (non-mutual) TLS channel by explicitly setting
+ the ssl_credentials::
+
+ regular_ssl_credentials = grpc.ssl_channel_credentials()
+
+ channel = google.auth.transport.grpc.secure_authorized_channel(
+            credentials, request, regular_endpoint,
+ ssl_credentials=regular_ssl_credentials)
+
+ Option 2: create a mutual TLS channel by calling a callback which returns
+ the client side certificate and the key (Note that
+ `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be explicitly
+ set to `true`)::
+
+ def my_client_cert_callback():
+ code_to_load_client_cert_and_key()
+ if loaded:
+ return (pem_cert_bytes, pem_key_bytes)
+ raise MyClientCertFailureException()
+
+ try:
+ channel = google.auth.transport.grpc.secure_authorized_channel(
+                credentials, request, mtls_endpoint,
+ client_cert_callback=my_client_cert_callback)
+ except MyClientCertFailureException:
+ # handle the exception
+
+ Option 3: use application default SSL credentials. It searches and uses
+ the command in a context aware metadata file, which is available on devices
+ with endpoint verification support (Note that
+ `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be explicitly
+ set to `true`).
+ See https://cloud.google.com/endpoint-verification/docs/overview::
+
+ try:
+ default_ssl_credentials = SslCredentials()
+ except:
+ # Exception can be raised if the context aware metadata is malformed.
+ # See :class:`SslCredentials` for the possible exceptions.
+
+ # Choose the endpoint based on the SSL credentials type.
+ if default_ssl_credentials.is_mtls:
+ endpoint_to_use = mtls_endpoint
+ else:
+ endpoint_to_use = regular_endpoint
+ channel = google.auth.transport.grpc.secure_authorized_channel(
+            credentials, request, endpoint_to_use,
+ ssl_credentials=default_ssl_credentials)
+
+ Option 4: not setting ssl_credentials and client_cert_callback. For devices
+ without endpoint verification support or `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable is not `true`, a regular TLS channel is created;
+ otherwise, a mutual TLS channel is created, however, the call should be
+ wrapped in a try/except block in case of malformed context aware metadata.
+
+    The following code uses regular_endpoint; it works the same whether the
+    created channel is regular or mutual TLS. The regular endpoint ignores the
+    client certificate and key::
+
+ channel = google.auth.transport.grpc.secure_authorized_channel(
+        credentials, request, regular_endpoint)
+
+    The following code uses mtls_endpoint. If the created channel is regular,
+    and the API mtls_endpoint is configured to require client SSL credentials,
+    API calls using this channel will be rejected::
+
+ channel = google.auth.transport.grpc.secure_authorized_channel(
+        credentials, request, mtls_endpoint)
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials to
+ add to requests.
+ request (google.auth.transport.Request): A HTTP transport request
+ object used to refresh credentials as needed. Even though gRPC
+ is a separate transport, there's no way to refresh the credentials
+ without using a standard http transport.
+ target (str): The host and port of the service.
+ ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
+ credentials. This can be used to specify different certificates.
+ This argument is mutually exclusive with client_cert_callback;
+ providing both will raise an exception.
+ If ssl_credentials and client_cert_callback are None, application
+ default SSL credentials are used if `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable is explicitly set to `true`, otherwise one way TLS
+ SSL credentials are used.
+ client_cert_callback (Callable[[], (bytes, bytes)]): Optional
+        callback function to obtain client certificate and key for mutual TLS
+ connection. This argument is mutually exclusive with
+ ssl_credentials; providing both will raise an exception.
+ This argument does nothing unless `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable is explicitly set to `true`.
+ kwargs: Additional arguments to pass to :func:`grpc.secure_channel`.
+
+ Returns:
+ grpc.Channel: The created gRPC channel.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
+ creation failed for any reason.
+ """
+ # Create the metadata plugin for inserting the authorization header.
+ metadata_plugin = AuthMetadataPlugin(credentials, request)
+
+ # Create a set of grpc.CallCredentials using the metadata plugin.
+ google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin)
+
+ if ssl_credentials and client_cert_callback:
+ raise exceptions.MalformedError(
+ "Received both ssl_credentials and client_cert_callback; "
+ "these are mutually exclusive."
+ )
+
+ # If SSL credentials are not explicitly set, try client_cert_callback and ADC.
+ if not ssl_credentials:
+ use_client_cert = os.getenv(
+ environment_vars.GOOGLE_API_USE_CLIENT_CERTIFICATE, "false"
+ )
+ if use_client_cert == "true" and client_cert_callback:
+ # Use the callback if provided.
+ cert, key = client_cert_callback()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ elif use_client_cert == "true":
+ # Use application default SSL credentials.
+            adc_ssl_credentials = SslCredentials()
+            ssl_credentials = adc_ssl_credentials.ssl_credentials
+ else:
+ ssl_credentials = grpc.ssl_channel_credentials()
+
+ # Combine the ssl credentials and the authorization credentials.
+ composite_credentials = grpc.composite_channel_credentials(
+ ssl_credentials, google_auth_credentials
+ )
+
+ return grpc.secure_channel(target, composite_credentials, **kwargs)
+
+
+class SslCredentials:
+ """Class for application default SSL credentials.
+
+ The behavior is controlled by `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment
+ variable whose default value is `false`. Client certificate will not be used
+ unless the environment variable is explicitly set to `true`. See
+ https://google.aip.dev/auth/4114
+
+ If the environment variable is `true`, then for devices with endpoint verification
+ support, a device certificate will be automatically loaded and mutual TLS will
+ be established.
+ See https://cloud.google.com/endpoint-verification/docs/overview.
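+
+    For example, a rough sketch (``mtls_endpoint`` and ``regular_endpoint``
+    are illustrative endpoint names)::
+
+        ssl_credentials = SslCredentials()
+        endpoint = mtls_endpoint if ssl_credentials.is_mtls else regular_endpoint
+        channel = secure_authorized_channel(
+            credentials, request, endpoint,
+            ssl_credentials=ssl_credentials.ssl_credentials)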
+ """
+
+ def __init__(self):
+ use_client_cert = os.getenv(
+ environment_vars.GOOGLE_API_USE_CLIENT_CERTIFICATE, "false"
+ )
+ if use_client_cert != "true":
+ self._is_mtls = False
+ else:
+ # Load client SSL credentials.
+ metadata_path = _mtls_helper._check_dca_metadata_path(
+ _mtls_helper.CONTEXT_AWARE_METADATA_PATH
+ )
+ self._is_mtls = metadata_path is not None
+
+ @property
+ def ssl_credentials(self):
+ """Get the created SSL channel credentials.
+
+ For devices with endpoint verification support, if the device certificate
+ loading has any problems, corresponding exceptions will be raised. For
+ a device without endpoint verification support, no exceptions will be
+ raised.
+
+ Returns:
+ grpc.ChannelCredentials: The created grpc channel credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
+ creation failed for any reason.
+ """
+ if self._is_mtls:
+ try:
+ _, cert, key, _ = _mtls_helper.get_client_ssl_credentials()
+ self._ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ except exceptions.ClientCertError as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ raise new_exc from caught_exc
+ else:
+ self._ssl_credentials = grpc.ssl_channel_credentials()
+
+ return self._ssl_credentials
+
+ @property
+ def is_mtls(self):
+ """Indicates if the created SSL channel credentials is mutual TLS."""
+ return self._is_mtls
diff --git a/Lib/site-packages/google/auth/transport/mtls.py b/Lib/site-packages/google/auth/transport/mtls.py
new file mode 100644
index 0000000..c570761
--- /dev/null
+++ b/Lib/site-packages/google/auth/transport/mtls.py
@@ -0,0 +1,103 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilites for mutual TLS."""
+
+from google.auth import exceptions
+from google.auth.transport import _mtls_helper
+
+
+def has_default_client_cert_source():
+ """Check if default client SSL credentials exists on the device.
+
+ Returns:
+ bool: indicating if the default client cert source exists.
+ """
+ metadata_path = _mtls_helper._check_dca_metadata_path(
+ _mtls_helper.CONTEXT_AWARE_METADATA_PATH
+ )
+ return metadata_path is not None
+
+
+def default_client_cert_source():
+ """Get a callback which returns the default client SSL credentials.
+
+ Returns:
+ Callable[[], [bytes, bytes]]: A callback which returns the default
+ client certificate bytes and private key bytes, both in PEM format.
+
+ Raises:
+ google.auth.exceptions.DefaultClientCertSourceError: If the default
+ client SSL credentials don't exist or are malformed.
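+
+    For example, a rough sketch (assumes a default client cert source exists
+    on the device)::
+
+        callback = default_client_cert_source()
+        cert_bytes, key_bytes = callback()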
+ """
+ if not has_default_client_cert_source():
+ raise exceptions.MutualTLSChannelError(
+ "Default client cert source doesn't exist"
+ )
+
+ def callback():
+ try:
+ _, cert_bytes, key_bytes = _mtls_helper.get_client_cert_and_key()
+ except (OSError, RuntimeError, ValueError) as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ raise new_exc from caught_exc
+
+ return cert_bytes, key_bytes
+
+ return callback
+
+
+def default_client_encrypted_cert_source(cert_path, key_path):
+ """Get a callback which returns the default encrpyted client SSL credentials.
+
+ Args:
+ cert_path (str): The cert file path. The default client certificate will
+ be written to this file when the returned callback is called.
+ key_path (str): The key file path. The default encrypted client key will
+ be written to this file when the returned callback is called.
+
+ Returns:
+ Callable[[], [str, str, bytes]]: A callback which generates the default
+            client certificate, encrypted private key and passphrase. It writes
+ the certificate and private key into the cert_path and key_path, and
+ returns the cert_path, key_path and passphrase bytes.
+
+ Raises:
+ google.auth.exceptions.DefaultClientCertSourceError: If any problem
+ occurs when loading or saving the client certificate and key.
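+
+    For example, a rough sketch (the paths are illustrative)::
+
+        callback = default_client_encrypted_cert_source(
+            "/path/to/cert.pem", "/path/to/key.pem")
+        cert_path, key_path, passphrase = callback()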
+ """
+ if not has_default_client_cert_source():
+ raise exceptions.MutualTLSChannelError(
+ "Default client encrypted cert source doesn't exist"
+ )
+
+ def callback():
+ try:
+ (
+ _,
+ cert_bytes,
+ key_bytes,
+ passphrase_bytes,
+ ) = _mtls_helper.get_client_ssl_credentials(generate_encrypted_key=True)
+ with open(cert_path, "wb") as cert_file:
+ cert_file.write(cert_bytes)
+ with open(key_path, "wb") as key_file:
+ key_file.write(key_bytes)
+ except (exceptions.ClientCertError, OSError) as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ raise new_exc from caught_exc
+
+ return cert_path, key_path, passphrase_bytes
+
+ return callback
diff --git a/Lib/site-packages/google/auth/transport/requests.py b/Lib/site-packages/google/auth/transport/requests.py
new file mode 100644
index 0000000..aa16113
--- /dev/null
+++ b/Lib/site-packages/google/auth/transport/requests.py
@@ -0,0 +1,603 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport adapter for Requests."""
+
+from __future__ import absolute_import
+
+import functools
+import logging
+import numbers
+import os
+import time
+
+try:
+ import requests
+except ImportError as caught_exc: # pragma: NO COVER
+ raise ImportError(
+ "The requests library is not installed from please install the requests package to use the requests transport."
+ ) from caught_exc
+import requests.adapters # pylint: disable=ungrouped-imports
+import requests.exceptions # pylint: disable=ungrouped-imports
+from requests.packages.urllib3.util.ssl_ import ( # type: ignore
+ create_urllib3_context,
+) # pylint: disable=ungrouped-imports
+
+from google.auth import environment_vars
+from google.auth import exceptions
+from google.auth import transport
+import google.auth.transport._mtls_helper
+from google.oauth2 import service_account
+
+_LOGGER = logging.getLogger(__name__)
+
+_DEFAULT_TIMEOUT = 120 # in seconds
+
+
+class _Response(transport.Response):
+ """Requests transport response adapter.
+
+ Args:
+ response (requests.Response): The raw Requests response.
+ """
+
+ def __init__(self, response):
+ self._response = response
+
+ @property
+ def status(self):
+ return self._response.status_code
+
+ @property
+ def headers(self):
+ return self._response.headers
+
+ @property
+ def data(self):
+ return self._response.content
+
+
+class TimeoutGuard(object):
+ """A context manager raising an error if the suite execution took too long.
+
+ Args:
+ timeout (Union[None, Union[float, Tuple[float, float]]]):
+ The maximum number of seconds a suite can run without the context
+ manager raising a timeout exception on exit. If passed as a tuple,
+ the smaller of the values is taken as a timeout. If ``None``, a
+ timeout error is never raised.
+ timeout_error_type (Optional[Exception]):
+ The type of the error to raise on timeout. Defaults to
+ :class:`requests.exceptions.Timeout`.
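+
+    For example, a rough sketch chaining two calls under one deadline
+    (``first_call`` and ``second_call`` are hypothetical helpers)::
+
+        with TimeoutGuard(5.0) as guard:
+            first_call()
+        # remaining_timeout was reduced by the time first_call() took
+        with TimeoutGuard(guard.remaining_timeout):
+            second_call()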
+ """
+
+ def __init__(self, timeout, timeout_error_type=requests.exceptions.Timeout):
+ self._timeout = timeout
+ self.remaining_timeout = timeout
+ self._timeout_error_type = timeout_error_type
+
+ def __enter__(self):
+ self._start = time.time()
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ if exc_value:
+ return # let the error bubble up automatically
+
+ if self._timeout is None:
+ return # nothing to do, the timeout was not specified
+
+ elapsed = time.time() - self._start
+ deadline_hit = False
+
+ if isinstance(self._timeout, numbers.Number):
+ self.remaining_timeout = self._timeout - elapsed
+ deadline_hit = self.remaining_timeout <= 0
+ else:
+ self.remaining_timeout = tuple(x - elapsed for x in self._timeout)
+ deadline_hit = min(self.remaining_timeout) <= 0
+
+ if deadline_hit:
+ raise self._timeout_error_type()
+
+
+class Request(transport.Request):
+ """Requests request adapter.
+
+ This class is used internally for making requests using various transports
+ in a consistent way. If you use :class:`AuthorizedSession` you do not need
+ to construct or use this class directly.
+
+ This class can be useful if you want to manually refresh a
+ :class:`~google.auth.credentials.Credentials` instance::
+
+ import google.auth.transport.requests
+ import requests
+
+ request = google.auth.transport.requests.Request()
+
+ credentials.refresh(request)
+
+ Args:
+        session (requests.Session): An instance of :class:`requests.Session` used
+ to make HTTP requests. If not specified, a session will be created.
+
+ .. automethod:: __call__
+ """
+
+ def __init__(self, session=None):
+ if not session:
+ session = requests.Session()
+
+ self.session = session
+
+ def __del__(self):
+ try:
+ if hasattr(self, "session") and self.session is not None:
+ self.session.close()
+ except TypeError:
+            # NOTE: For certain Python builds, the queue.Empty exception
+            # might not be considered a normal Python exception, which
+            # causes a TypeError here.
+ pass
+
+ def __call__(
+ self,
+ url,
+ method="GET",
+ body=None,
+ headers=None,
+ timeout=_DEFAULT_TIMEOUT,
+ **kwargs
+ ):
+ """Make an HTTP request using requests.
+
+ Args:
+ url (str): The URI to be requested.
+ method (str): The HTTP method to use for the request. Defaults
+ to 'GET'.
+ body (bytes): The payload or body in HTTP request.
+ headers (Mapping[str, str]): Request headers.
+ timeout (Optional[int]): The number of seconds to wait for a
+ response from the server. If not specified or if None, the
+ requests default timeout will be used.
+ kwargs: Additional arguments passed through to the underlying
+ requests :meth:`~requests.Session.request` method.
+
+ Returns:
+ google.auth.transport.Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TransportError: If any exception occurred.
+ """
+ try:
+ _LOGGER.debug("Making request: %s %s", method, url)
+ response = self.session.request(
+ method, url, data=body, headers=headers, timeout=timeout, **kwargs
+ )
+ return _Response(response)
+ except requests.exceptions.RequestException as caught_exc:
+ new_exc = exceptions.TransportError(caught_exc)
+ raise new_exc from caught_exc
+
+
+class _MutualTlsAdapter(requests.adapters.HTTPAdapter):
+ """
+ A TransportAdapter that enables mutual TLS.
+
+ Args:
+ cert (bytes): client certificate in PEM format
+ key (bytes): client private key in PEM format
+
+ Raises:
+ ImportError: if certifi or pyOpenSSL is not installed
+ OpenSSL.crypto.Error: if client cert or key is invalid
+ """
+
+ def __init__(self, cert, key):
+ import certifi
+ from OpenSSL import crypto
+ import urllib3.contrib.pyopenssl # type: ignore
+
+ urllib3.contrib.pyopenssl.inject_into_urllib3()
+
+ pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key)
+ x509 = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
+
+ ctx_poolmanager = create_urllib3_context()
+ ctx_poolmanager.load_verify_locations(cafile=certifi.where())
+ ctx_poolmanager._ctx.use_certificate(x509)
+ ctx_poolmanager._ctx.use_privatekey(pkey)
+ self._ctx_poolmanager = ctx_poolmanager
+
+ ctx_proxymanager = create_urllib3_context()
+ ctx_proxymanager.load_verify_locations(cafile=certifi.where())
+ ctx_proxymanager._ctx.use_certificate(x509)
+ ctx_proxymanager._ctx.use_privatekey(pkey)
+ self._ctx_proxymanager = ctx_proxymanager
+
+ super(_MutualTlsAdapter, self).__init__()
+
+ def init_poolmanager(self, *args, **kwargs):
+ kwargs["ssl_context"] = self._ctx_poolmanager
+ super(_MutualTlsAdapter, self).init_poolmanager(*args, **kwargs)
+
+ def proxy_manager_for(self, *args, **kwargs):
+ kwargs["ssl_context"] = self._ctx_proxymanager
+ return super(_MutualTlsAdapter, self).proxy_manager_for(*args, **kwargs)
+
+
+class _MutualTlsOffloadAdapter(requests.adapters.HTTPAdapter):
+ """
+ A TransportAdapter that enables mutual TLS and offloads the client side
+ signing operation to the signing library.
+
+ Args:
+        enterprise_cert_file_path (str): the path to an enterprise cert JSON
+ file. The file should contain the following field:
+
+ {
+ "libs": {
+ "signer_library": "...",
+ "offload_library": "..."
+ }
+ }
+
+ Raises:
+ ImportError: if certifi or pyOpenSSL is not installed
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
+ creation failed for any reason.
+ """
+
+ def __init__(self, enterprise_cert_file_path):
+ import certifi
+ import urllib3.contrib.pyopenssl
+
+ from google.auth.transport import _custom_tls_signer
+
+ # Call inject_into_urllib3 to activate certificate checking. See the
+ # following links for more info:
+ # (1) doc: https://github.com/urllib3/urllib3/blob/cb9ebf8aac5d75f64c8551820d760b72b619beff/src/urllib3/contrib/pyopenssl.py#L31-L32
+ # (2) mTLS example: https://github.com/urllib3/urllib3/issues/474#issuecomment-253168415
+ urllib3.contrib.pyopenssl.inject_into_urllib3()
+
+ self.signer = _custom_tls_signer.CustomTlsSigner(enterprise_cert_file_path)
+ self.signer.load_libraries()
+
+ poolmanager = create_urllib3_context()
+ poolmanager.load_verify_locations(cafile=certifi.where())
+ self.signer.attach_to_ssl_context(poolmanager)
+ self._ctx_poolmanager = poolmanager
+
+ proxymanager = create_urllib3_context()
+ proxymanager.load_verify_locations(cafile=certifi.where())
+ self.signer.attach_to_ssl_context(proxymanager)
+ self._ctx_proxymanager = proxymanager
+
+ super(_MutualTlsOffloadAdapter, self).__init__()
+
+ def init_poolmanager(self, *args, **kwargs):
+ kwargs["ssl_context"] = self._ctx_poolmanager
+ super(_MutualTlsOffloadAdapter, self).init_poolmanager(*args, **kwargs)
+
+ def proxy_manager_for(self, *args, **kwargs):
+ kwargs["ssl_context"] = self._ctx_proxymanager
+ return super(_MutualTlsOffloadAdapter, self).proxy_manager_for(*args, **kwargs)
+
+
+class AuthorizedSession(requests.Session):
+ """A Requests Session class with credentials.
+
+ This class is used to perform requests to API endpoints that require
+ authorization::
+
+ from google.auth.transport.requests import AuthorizedSession
+
+ authed_session = AuthorizedSession(credentials)
+
+ response = authed_session.request(
+ 'GET', 'https://www.googleapis.com/storage/v1/b')
+
+
+ The underlying :meth:`request` implementation handles adding the
+ credentials' headers to the request and refreshing credentials as needed.
+
+ This class also supports mutual TLS via :meth:`configure_mtls_channel`
+ method. In order to use this method, the `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable must be explicitly set to ``true``, otherwise it does
+    nothing. Assuming the environment variable is set to ``true``, the method
+    behaves in the following manner:
+
+ If client_cert_callback is provided, client certificate and private
+ key are loaded using the callback; if client_cert_callback is None,
+ application default SSL credentials will be used. Exceptions are raised if
+ there are problems with the certificate, private key, or the loading process,
+ so it should be called within a try/except block.
+
+ First we set the environment variable to ``true``, then create an :class:`AuthorizedSession`
+ instance and specify the endpoints::
+
+ regular_endpoint = 'https://pubsub.googleapis.com/v1/projects/{my_project_id}/topics'
+ mtls_endpoint = 'https://pubsub.mtls.googleapis.com/v1/projects/{my_project_id}/topics'
+
+ authed_session = AuthorizedSession(credentials)
+
+ Now we can pass a callback to :meth:`configure_mtls_channel`::
+
+ def my_cert_callback():
+ # some code to load client cert bytes and private key bytes, both in
+ # PEM format.
+ some_code_to_load_client_cert_and_key()
+ if loaded:
+ return cert, key
+ raise MyClientCertFailureException()
+
+ # Always call configure_mtls_channel within a try/except block.
+ try:
+ authed_session.configure_mtls_channel(my_cert_callback)
+ except:
+ # handle exceptions.
+
+ if authed_session.is_mtls:
+ response = authed_session.request('GET', mtls_endpoint)
+ else:
+ response = authed_session.request('GET', regular_endpoint)
+
+
+ You can alternatively use application default SSL credentials like this::
+
+ try:
+ authed_session.configure_mtls_channel()
+ except:
+ # handle exceptions.
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials to
+ add to the request.
+ refresh_status_codes (Sequence[int]): Which HTTP status codes indicate
+ that credentials should be refreshed and the request should be
+ retried.
+ max_refresh_attempts (int): The maximum number of times to attempt to
+ refresh the credentials and retry the request.
+ refresh_timeout (Optional[int]): The timeout value in seconds for
+ credential refresh HTTP requests.
+ auth_request (google.auth.transport.requests.Request):
+ (Optional) An instance of
+ :class:`~google.auth.transport.requests.Request` used when
+ refreshing credentials. If not passed,
+ an instance of :class:`~google.auth.transport.requests.Request`
+ is created.
+ default_host (Optional[str]): A host like "pubsub.googleapis.com".
+ This is used when a self-signed JWT is created from service
+ account credentials.
+ """
+
+ def __init__(
+ self,
+ credentials,
+ refresh_status_codes=transport.DEFAULT_REFRESH_STATUS_CODES,
+ max_refresh_attempts=transport.DEFAULT_MAX_REFRESH_ATTEMPTS,
+ refresh_timeout=None,
+ auth_request=None,
+ default_host=None,
+ ):
+ super(AuthorizedSession, self).__init__()
+ self.credentials = credentials
+ self._refresh_status_codes = refresh_status_codes
+ self._max_refresh_attempts = max_refresh_attempts
+ self._refresh_timeout = refresh_timeout
+ self._is_mtls = False
+ self._default_host = default_host
+
+ if auth_request is None:
+ self._auth_request_session = requests.Session()
+
+ # Using an adapter to make HTTP requests robust to network errors.
+            # This adapter retries HTTP requests when network errors occur
+            # and the request seems safely retryable.
+ retry_adapter = requests.adapters.HTTPAdapter(max_retries=3)
+ self._auth_request_session.mount("https://", retry_adapter)
+
+ # Do not pass `self` as the session here, as it can lead to
+ # infinite recursion.
+ auth_request = Request(self._auth_request_session)
+ else:
+ self._auth_request_session = None
+
+ # Request instance used by internal methods (for example,
+ # credentials.refresh).
+ self._auth_request = auth_request
+
+ # https://google.aip.dev/auth/4111
+ # Attempt to use self-signed JWTs when a service account is used.
+ if isinstance(self.credentials, service_account.Credentials):
+ self.credentials._create_self_signed_jwt(
+ "https://{}/".format(self._default_host) if self._default_host else None
+ )
+
+ def configure_mtls_channel(self, client_cert_callback=None):
+ """Configure the client certificate and key for SSL connection.
+
+ The function does nothing unless `GOOGLE_API_USE_CLIENT_CERTIFICATE` is
+        explicitly set to `true`. In this case, if a client certificate and key
+        are successfully obtained (from the given client_cert_callback or from
+        application default SSL credentials), a :class:`_MutualTlsAdapter`
+        instance will be mounted to the "https://" prefix.
+
+ Args:
+ client_cert_callback (Optional[Callable[[], (bytes, bytes)]]):
+ The optional callback returns the client certificate and private
+ key bytes both in PEM format.
+ If the callback is None, application default SSL credentials
+ will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
+ creation failed for any reason.
+ """
+ use_client_cert = os.getenv(
+ environment_vars.GOOGLE_API_USE_CLIENT_CERTIFICATE, "false"
+ )
+ if use_client_cert != "true":
+ self._is_mtls = False
+ return
+
+ try:
+ import OpenSSL
+ except ImportError as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ raise new_exc from caught_exc
+
+ try:
+ (
+ self._is_mtls,
+ cert,
+ key,
+ ) = google.auth.transport._mtls_helper.get_client_cert_and_key(
+ client_cert_callback
+ )
+
+ if self._is_mtls:
+ mtls_adapter = _MutualTlsAdapter(cert, key)
+ self.mount("https://", mtls_adapter)
+ except (
+ exceptions.ClientCertError,
+ ImportError,
+ OpenSSL.crypto.Error,
+ ) as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ raise new_exc from caught_exc
+
+ def request(
+ self,
+ method,
+ url,
+ data=None,
+ headers=None,
+ max_allowed_time=None,
+ timeout=_DEFAULT_TIMEOUT,
+ **kwargs
+ ):
+ """Implementation of Requests' request.
+
+ Args:
+ timeout (Optional[Union[float, Tuple[float, float]]]):
+ The amount of time in seconds to wait for the server response
+ with each individual request. Can also be passed as a tuple
+ ``(connect_timeout, read_timeout)``. See :meth:`requests.Session.request`
+ documentation for details.
+ max_allowed_time (Optional[float]):
+ If the method runs longer than this, a ``Timeout`` exception is
+ automatically raised. Unlike the ``timeout`` parameter, this
+ value applies to the total method execution time, even if
+ multiple requests are made under the hood.
+
+ Mind that it is not guaranteed that the timeout error is raised
+ at ``max_allowed_time``. It might take longer, for example, if
+ an underlying request takes a lot of time, but the request
+                itself does not time out, e.g. if a large file is being
+                transmitted. The timeout error will be raised after such a
+                request completes.
+ """
+ # pylint: disable=arguments-differ
+ # Requests has a ton of arguments to request, but only two
+ # (method, url) are required. We pass through all of the other
+ # arguments to super, so no need to exhaustively list them here.
+
+ # Use a kwarg for this instead of an attribute to maintain
+ # thread-safety.
+ _credential_refresh_attempt = kwargs.pop("_credential_refresh_attempt", 0)
+
+ # Make a copy of the headers. They will be modified by the credentials
+ # and we want to pass the original headers if we recurse.
+ request_headers = headers.copy() if headers is not None else {}
+
+ # Do not apply the timeout unconditionally in order to not override the
+ # _auth_request's default timeout.
+ auth_request = (
+ self._auth_request
+ if timeout is None
+ else functools.partial(self._auth_request, timeout=timeout)
+ )
+
+ remaining_time = max_allowed_time
+
+ with TimeoutGuard(remaining_time) as guard:
+ self.credentials.before_request(auth_request, method, url, request_headers)
+ remaining_time = guard.remaining_timeout
+
+ with TimeoutGuard(remaining_time) as guard:
+ response = super(AuthorizedSession, self).request(
+ method,
+ url,
+ data=data,
+ headers=request_headers,
+ timeout=timeout,
+ **kwargs
+ )
+ remaining_time = guard.remaining_timeout
+
+ # If the response indicated that the credentials needed to be
+ # refreshed, then refresh the credentials and re-attempt the
+ # request.
+ # A stored token may expire between the time it is retrieved and
+ # the time the request is made, so we may need to try twice.
+ if (
+ response.status_code in self._refresh_status_codes
+ and _credential_refresh_attempt < self._max_refresh_attempts
+ ):
+
+ _LOGGER.info(
+ "Refreshing credentials due to a %s response. Attempt %s/%s.",
+ response.status_code,
+ _credential_refresh_attempt + 1,
+ self._max_refresh_attempts,
+ )
+
+ # Do not apply the timeout unconditionally in order to not override the
+ # _auth_request's default timeout.
+ auth_request = (
+ self._auth_request
+ if timeout is None
+ else functools.partial(self._auth_request, timeout=timeout)
+ )
+
+ with TimeoutGuard(remaining_time) as guard:
+ self.credentials.refresh(auth_request)
+ remaining_time = guard.remaining_timeout
+
+ # Recurse. Pass in the original headers, not our modified set, but
+ # do pass the adjusted max allowed time (i.e. the remaining total time).
+ return self.request(
+ method,
+ url,
+ data=data,
+ headers=headers,
+ max_allowed_time=remaining_time,
+ timeout=timeout,
+ _credential_refresh_attempt=_credential_refresh_attempt + 1,
+ **kwargs
+ )
+
+ return response
+
+ @property
+ def is_mtls(self):
+ """Indicates if the created SSL channel is mutual TLS."""
+ return self._is_mtls
+
+ def close(self):
+ if self._auth_request_session is not None:
+ self._auth_request_session.close()
+ super(AuthorizedSession, self).close()
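+
+
+# A minimal usage sketch (not part of the library), assuming application
+# default credentials are configured. It illustrates how ``timeout`` (a
+# per-attempt limit) differs from ``max_allowed_time`` (a total wall-clock
+# budget that also covers credential refreshes and retried requests):
+#
+#     import google.auth
+#     from google.auth.transport.requests import AuthorizedSession
+#
+#     credentials, _ = google.auth.default()
+#     authed_session = AuthorizedSession(credentials)
+#     response = authed_session.request(
+#         "GET",
+#         "https://www.googleapis.com/storage/v1/b",
+#         timeout=5,            # each individual request may take up to 5s
+#         max_allowed_time=30,  # the whole call, refreshes included, up to 30s
+#     )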
diff --git a/Lib/site-packages/google/auth/transport/urllib3.py b/Lib/site-packages/google/auth/transport/urllib3.py
new file mode 100644
index 0000000..63144f5
--- /dev/null
+++ b/Lib/site-packages/google/auth/transport/urllib3.py
@@ -0,0 +1,444 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport adapter for urllib3."""
+
+from __future__ import absolute_import
+
+import logging
+import os
+import warnings
+
+# Certifi is Mozilla's certificate bundle. Urllib3 needs a certificate bundle
+# to verify HTTPS requests, and certifi is the recommended and most reliable
+# way to get a root certificate bundle. See
+# http://urllib3.readthedocs.io/en/latest/user-guide.html\
+# #certificate-verification
+# for more details.
+try:
+ import certifi
+except ImportError: # pragma: NO COVER
+ certifi = None # type: ignore
+
+try:
+ import urllib3 # type: ignore
+ import urllib3.exceptions # type: ignore
+except ImportError as caught_exc: # pragma: NO COVER
+ raise ImportError(
+        "The urllib3 library is not installed, please install the "
+ "urllib3 package to use the urllib3 transport."
+ ) from caught_exc
+
+from packaging import version # type: ignore
+
+from google.auth import environment_vars
+from google.auth import exceptions
+from google.auth import transport
+from google.oauth2 import service_account
+
+if version.parse(urllib3.__version__) >= version.parse("2.0.0"): # pragma: NO COVER
+ RequestMethods = urllib3._request_methods.RequestMethods # type: ignore
+else: # pragma: NO COVER
+ RequestMethods = urllib3.request.RequestMethods # type: ignore
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class _Response(transport.Response):
+ """urllib3 transport response adapter.
+
+ Args:
+ response (urllib3.response.HTTPResponse): The raw urllib3 response.
+ """
+
+ def __init__(self, response):
+ self._response = response
+
+ @property
+ def status(self):
+ return self._response.status
+
+ @property
+ def headers(self):
+ return self._response.headers
+
+ @property
+ def data(self):
+ return self._response.data
+
+
+class Request(transport.Request):
+ """urllib3 request adapter.
+
+ This class is used internally for making requests using various transports
+ in a consistent way. If you use :class:`AuthorizedHttp` you do not need
+ to construct or use this class directly.
+
+ This class can be useful if you want to manually refresh a
+ :class:`~google.auth.credentials.Credentials` instance::
+
+ import google.auth.transport.urllib3
+ import urllib3
+
+ http = urllib3.PoolManager()
+ request = google.auth.transport.urllib3.Request(http)
+
+ credentials.refresh(request)
+
+ Args:
+ http (urllib3.request.RequestMethods): An instance of any urllib3
+ class that implements :class:`~urllib3.request.RequestMethods`,
+ usually :class:`urllib3.PoolManager`.
+
+ .. automethod:: __call__
+ """
+
+ def __init__(self, http):
+ self.http = http
+
+ def __call__(
+ self, url, method="GET", body=None, headers=None, timeout=None, **kwargs
+ ):
+ """Make an HTTP request using urllib3.
+
+ Args:
+ url (str): The URI to be requested.
+ method (str): The HTTP method to use for the request. Defaults
+ to 'GET'.
+ body (bytes): The payload / body in HTTP request.
+ headers (Mapping[str, str]): Request headers.
+ timeout (Optional[int]): The number of seconds to wait for a
+ response from the server. If not specified or if None, the
+ urllib3 default timeout will be used.
+            kwargs: Additional arguments passed through to the underlying
+ urllib3 :meth:`urlopen` method.
+
+ Returns:
+ google.auth.transport.Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TransportError: If any exception occurred.
+ """
+ # urllib3 uses a sentinel default value for timeout, so only set it if
+ # specified.
+ if timeout is not None:
+ kwargs["timeout"] = timeout
+
+ try:
+ _LOGGER.debug("Making request: %s %s", method, url)
+ response = self.http.request(
+ method, url, body=body, headers=headers, **kwargs
+ )
+ return _Response(response)
+ except urllib3.exceptions.HTTPError as caught_exc:
+ new_exc = exceptions.TransportError(caught_exc)
+ raise new_exc from caught_exc
+
+
+def _make_default_http():
+ if certifi is not None:
+ return urllib3.PoolManager(cert_reqs="CERT_REQUIRED", ca_certs=certifi.where())
+ else:
+ return urllib3.PoolManager()
+
+
+def _make_mutual_tls_http(cert, key):
+ """Create a mutual TLS HTTP connection with the given client cert and key.
+ See https://github.com/urllib3/urllib3/issues/474#issuecomment-253168415
+
+ Args:
+ cert (bytes): client certificate in PEM format
+ key (bytes): client private key in PEM format
+
+ Returns:
+ urllib3.PoolManager: Mutual TLS HTTP connection.
+
+ Raises:
+ ImportError: If certifi or pyOpenSSL is not installed.
+ OpenSSL.crypto.Error: If the cert or key is invalid.
+ """
+ import certifi
+ from OpenSSL import crypto
+ import urllib3.contrib.pyopenssl # type: ignore
+
+ urllib3.contrib.pyopenssl.inject_into_urllib3()
+ ctx = urllib3.util.ssl_.create_urllib3_context()
+ ctx.load_verify_locations(cafile=certifi.where())
+
+ pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key)
+ x509 = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
+
+ ctx._ctx.use_certificate(x509)
+ ctx._ctx.use_privatekey(pkey)
+
+ http = urllib3.PoolManager(ssl_context=ctx)
+ return http
+
+# A minimal sketch (not part of the library) of building a mutual TLS pool
+# manager with the helper above; ``client_cert.pem`` and ``client_key.pem``
+# are placeholder paths to PEM-encoded files:
+#
+#     with open("client_cert.pem", "rb") as f:
+#         cert = f.read()
+#     with open("client_key.pem", "rb") as f:
+#         key = f.read()
+#
+#     http = _make_mutual_tls_http(cert, key)
+#     response = http.request("GET", "https://pubsub.mtls.googleapis.com/")
+
+
+class AuthorizedHttp(RequestMethods): # type: ignore
+ """A urllib3 HTTP class with credentials.
+
+ This class is used to perform requests to API endpoints that require
+ authorization::
+
+ from google.auth.transport.urllib3 import AuthorizedHttp
+
+ authed_http = AuthorizedHttp(credentials)
+
+ response = authed_http.request(
+ 'GET', 'https://www.googleapis.com/storage/v1/b')
+
+ This class implements :class:`urllib3.request.RequestMethods` and can be
+ used just like any other :class:`urllib3.PoolManager`.
+
+ The underlying :meth:`urlopen` implementation handles adding the
+ credentials' headers to the request and refreshing credentials as needed.
+
+ This class also supports mutual TLS via :meth:`configure_mtls_channel`
+ method. In order to use this method, the `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable must be explicitly set to `true`, otherwise it does
+    nothing. Assuming the environment variable is set to `true`, the method
+    behaves in the following manner:
+ If client_cert_callback is provided, client certificate and private
+ key are loaded using the callback; if client_cert_callback is None,
+ application default SSL credentials will be used. Exceptions are raised if
+ there are problems with the certificate, private key, or the loading process,
+ so it should be called within a try/except block.
+
+ First we set the environment variable to `true`, then create an :class:`AuthorizedHttp`
+ instance and specify the endpoints::
+
+ regular_endpoint = 'https://pubsub.googleapis.com/v1/projects/{my_project_id}/topics'
+ mtls_endpoint = 'https://pubsub.mtls.googleapis.com/v1/projects/{my_project_id}/topics'
+
+ authed_http = AuthorizedHttp(credentials)
+
+ Now we can pass a callback to :meth:`configure_mtls_channel`::
+
+ def my_cert_callback():
+ # some code to load client cert bytes and private key bytes, both in
+ # PEM format.
+ some_code_to_load_client_cert_and_key()
+ if loaded:
+ return cert, key
+ raise MyClientCertFailureException()
+
+ # Always call configure_mtls_channel within a try/except block.
+ try:
+ is_mtls = authed_http.configure_mtls_channel(my_cert_callback)
+ except:
+ # handle exceptions.
+
+ if is_mtls:
+ response = authed_http.request('GET', mtls_endpoint)
+ else:
+ response = authed_http.request('GET', regular_endpoint)
+
+ You can alternatively use application default SSL credentials like this::
+
+ try:
+ is_mtls = authed_http.configure_mtls_channel()
+ except:
+ # handle exceptions.
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials to
+ add to the request.
+ http (urllib3.PoolManager): The underlying HTTP object to
+ use to make requests. If not specified, a
+ :class:`urllib3.PoolManager` instance will be constructed with
+ sane defaults.
+ refresh_status_codes (Sequence[int]): Which HTTP status codes indicate
+ that credentials should be refreshed and the request should be
+ retried.
+ max_refresh_attempts (int): The maximum number of times to attempt to
+ refresh the credentials and retry the request.
+ default_host (Optional[str]): A host like "pubsub.googleapis.com".
+ This is used when a self-signed JWT is created from service
+ account credentials.
+ """
+
+ def __init__(
+ self,
+ credentials,
+ http=None,
+ refresh_status_codes=transport.DEFAULT_REFRESH_STATUS_CODES,
+ max_refresh_attempts=transport.DEFAULT_MAX_REFRESH_ATTEMPTS,
+ default_host=None,
+ ):
+ if http is None:
+ self.http = _make_default_http()
+ self._has_user_provided_http = False
+ else:
+ self.http = http
+ self._has_user_provided_http = True
+
+ self.credentials = credentials
+ self._refresh_status_codes = refresh_status_codes
+ self._max_refresh_attempts = max_refresh_attempts
+ self._default_host = default_host
+ # Request instance used by internal methods (for example,
+ # credentials.refresh).
+ self._request = Request(self.http)
+
+ # https://google.aip.dev/auth/4111
+ # Attempt to use self-signed JWTs when a service account is used.
+ if isinstance(self.credentials, service_account.Credentials):
+ self.credentials._create_self_signed_jwt(
+ "https://{}/".format(self._default_host) if self._default_host else None
+ )
+
+ super(AuthorizedHttp, self).__init__()
+
+ def configure_mtls_channel(self, client_cert_callback=None):
+ """Configures mutual TLS channel using the given client_cert_callback or
+ application default SSL credentials. The behavior is controlled by
+ `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable.
+ (1) If the environment variable value is `true`, the function returns True
+ if the channel is mutual TLS and False otherwise. The `http` provided
+ in the constructor will be overwritten.
+ (2) If the environment variable is not set or `false`, the function does
+            nothing and always returns False.
+
+ Args:
+ client_cert_callback (Optional[Callable[[], (bytes, bytes)]]):
+ The optional callback returns the client certificate and private
+ key bytes both in PEM format.
+ If the callback is None, application default SSL credentials
+ will be used.
+
+ Returns:
+ True if the channel is mutual TLS and False otherwise.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS channel
+ creation failed for any reason.
+ """
+ use_client_cert = os.getenv(
+ environment_vars.GOOGLE_API_USE_CLIENT_CERTIFICATE, "false"
+ )
+ if use_client_cert != "true":
+ return False
+
+ try:
+ import OpenSSL
+ except ImportError as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ raise new_exc from caught_exc
+
+ try:
+ found_cert_key, cert, key = transport._mtls_helper.get_client_cert_and_key(
+ client_cert_callback
+ )
+
+ if found_cert_key:
+ self.http = _make_mutual_tls_http(cert, key)
+ else:
+ self.http = _make_default_http()
+ except (
+ exceptions.ClientCertError,
+ ImportError,
+ OpenSSL.crypto.Error,
+ ) as caught_exc:
+ new_exc = exceptions.MutualTLSChannelError(caught_exc)
+ raise new_exc from caught_exc
+
+ if self._has_user_provided_http:
+ self._has_user_provided_http = False
+ warnings.warn(
+ "`http` provided in the constructor is overwritten", UserWarning
+ )
+
+ return found_cert_key
+
+ def urlopen(self, method, url, body=None, headers=None, **kwargs):
+ """Implementation of urllib3's urlopen."""
+ # pylint: disable=arguments-differ
+ # We use kwargs to collect additional args that we don't need to
+ # introspect here. However, we do explicitly collect the two
+ # positional arguments.
+
+ # Use a kwarg for this instead of an attribute to maintain
+ # thread-safety.
+ _credential_refresh_attempt = kwargs.pop("_credential_refresh_attempt", 0)
+
+ if headers is None:
+ headers = self.headers
+
+ # Make a copy of the headers. They will be modified by the credentials
+ # and we want to pass the original headers if we recurse.
+ request_headers = headers.copy()
+
+ self.credentials.before_request(self._request, method, url, request_headers)
+
+ response = self.http.urlopen(
+ method, url, body=body, headers=request_headers, **kwargs
+ )
+
+ # If the response indicated that the credentials needed to be
+ # refreshed, then refresh the credentials and re-attempt the
+ # request.
+ # A stored token may expire between the time it is retrieved and
+ # the time the request is made, so we may need to try twice.
+ # The reason urllib3's retries aren't used is because they
+ # don't allow you to modify the request headers. :/
+ if (
+ response.status in self._refresh_status_codes
+ and _credential_refresh_attempt < self._max_refresh_attempts
+ ):
+
+ _LOGGER.info(
+ "Refreshing credentials due to a %s response. Attempt %s/%s.",
+ response.status,
+ _credential_refresh_attempt + 1,
+ self._max_refresh_attempts,
+ )
+
+ self.credentials.refresh(self._request)
+
+ # Recurse. Pass in the original headers, not our modified set.
+ return self.urlopen(
+ method,
+ url,
+ body=body,
+ headers=headers,
+ _credential_refresh_attempt=_credential_refresh_attempt + 1,
+ **kwargs
+ )
+
+ return response
+
+ # Proxy methods for compliance with the urllib3.PoolManager interface
+
+ def __enter__(self):
+ """Proxy to ``self.http``."""
+ return self.http.__enter__()
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ """Proxy to ``self.http``."""
+ return self.http.__exit__(exc_type, exc_val, exc_tb)
+
+ def __del__(self):
+ if hasattr(self, "http") and self.http is not None:
+ self.http.clear()
+
+ @property
+ def headers(self):
+ """Proxy to ``self.http``."""
+ return self.http.headers
+
+ @headers.setter
+ def headers(self, value):
+ """Proxy to ``self.http``."""
+ self.http.headers = value
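+
+
+# A minimal usage sketch (not part of the library): the application default
+# SSL credentials flow, assuming GOOGLE_API_USE_CLIENT_CERTIFICATE=true is set
+# in the environment and application default credentials are configured.
+# ``regular_endpoint`` and ``mtls_endpoint`` are the placeholders from the
+# class docstring above.
+#
+#     import google.auth
+#     from google.auth.transport.urllib3 import AuthorizedHttp
+#
+#     credentials, _ = google.auth.default()
+#     authed_http = AuthorizedHttp(credentials)
+#     is_mtls = authed_http.configure_mtls_channel()
+#     endpoint = mtls_endpoint if is_mtls else regular_endpoint
+#     response = authed_http.urlopen("GET", endpoint)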
diff --git a/Lib/site-packages/google/auth/version.py b/Lib/site-packages/google/auth/version.py
new file mode 100644
index 0000000..e1fa722
--- /dev/null
+++ b/Lib/site-packages/google/auth/version.py
@@ -0,0 +1,15 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "2.27.0"
diff --git a/Lib/site-packages/google/cloud/extended_operations.proto b/Lib/site-packages/google/cloud/extended_operations.proto
new file mode 100644
index 0000000..1477d2d
--- /dev/null
+++ b/Lib/site-packages/google/cloud/extended_operations.proto
@@ -0,0 +1,150 @@
+// Copyright 2021 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file contains custom annotations that are used by GAPIC generators to
+// handle Long Running Operation methods (LRO) that are NOT compliant with
+// https://google.aip.dev/151. These annotations are public for technical
+// reasons only. Please DO NOT USE them in your protos.
+syntax = "proto3";
+
+package google.cloud;
+
+import "google/protobuf/descriptor.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/cloud/extendedops;extendedops";
+option java_multiple_files = true;
+option java_outer_classname = "ExtendedOperationsProto";
+option java_package = "com.google.cloud";
+option objc_class_prefix = "GAPI";
+
+// FieldOptions to match corresponding fields in the initial request,
+// polling request and operation response messages.
+//
+// Example:
+//
+// In an API-specific operation message:
+//
+// message MyOperation {
+// string http_error_message = 1 [(operation_field) = ERROR_MESSAGE];
+// int32 http_error_status_code = 2 [(operation_field) = ERROR_CODE];
+// string id = 3 [(operation_field) = NAME];
+// Status status = 4 [(operation_field) = STATUS];
+// }
+//
+// In a polling request message (the one which is used to poll for an LRO
+// status):
+//
+// message MyPollingRequest {
+// string operation = 1 [(operation_response_field) = "id"];
+// string project = 2;
+// string region = 3;
+// }
+//
+// In an initial request message (the one which starts an LRO):
+//
+// message MyInitialRequest {
+// string my_project = 2 [(operation_request_field) = "project"];
+// string my_region = 3 [(operation_request_field) = "region"];
+// }
+//
+extend google.protobuf.FieldOptions {
+ // A field annotation that maps fields in an API-specific Operation object to
+ // their standard counterparts in google.longrunning.Operation. See
+ // OperationResponseMapping enum definition.
+ OperationResponseMapping operation_field = 1149;
+
+ // A field annotation that maps fields in the initial request message
+ // (the one which started the LRO) to their counterparts in the polling
+ // request message. For non-standard LRO, the polling response may be missing
+ // some of the information needed to make a subsequent polling request. The
+ // missing information (for example, project or region ID) is contained in the
+ // fields of the initial request message that this annotation must be applied
+ // to. The string value of the annotation corresponds to the name of the
+ // counterpart field in the polling request message that the annotated field's
+ // value will be copied to.
+ string operation_request_field = 1150;
+
+ // A field annotation that maps fields in the polling request message to their
+ // counterparts in the initial and/or polling response message. The initial
+ // and the polling methods return an API-specific Operation object. Some of
+ // the fields from that response object must be reused in the subsequent
+ // request (like operation name/ID) to fully identify the polled operation.
+ // This annotation must be applied to the fields in the polling request
+ // message, the string value of the annotation must correspond to the name of
+ // the counterpart field in the Operation response object whose value will be
+ // copied to the annotated field.
+ string operation_response_field = 1151;
+}
+
+// MethodOptions to identify the actual service and method used for operation
+// status polling.
+//
+// Example:
+//
+// In a method, which starts an LRO:
+//
+// service MyService {
+// rpc Foo(MyInitialRequest) returns (MyOperation) {
+// option (operation_service) = "MyPollingService";
+// }
+// }
+//
+// In a polling method:
+//
+// service MyPollingService {
+// rpc Get(MyPollingRequest) returns (MyOperation) {
+// option (operation_polling_method) = true;
+// }
+// }
+extend google.protobuf.MethodOptions {
+ // A method annotation that maps an LRO method (the one which starts an LRO)
+ // to the service, which will be used to poll for the operation status. The
+ // annotation must be applied to the method which starts an LRO, the string
+ // value of the annotation must correspond to the name of the service used to
+ // poll for the operation status.
+ string operation_service = 1249;
+
+ // A method annotation that marks methods that can be used for polling
+ // operation status (e.g. the MyPollingService.Get(MyPollingRequest) method).
+ bool operation_polling_method = 1250;
+}
+
+// An enum to be used to mark the essential (for polling) fields in an
+// API-specific Operation object. A custom Operation object may contain many
+// different fields, but only few of them are essential to conduct a successful
+// polling process.
+enum OperationResponseMapping {
+ // Do not use.
+ UNDEFINED = 0;
+
+ // A field in an API-specific (custom) Operation object which carries the same
+ // meaning as google.longrunning.Operation.name.
+ NAME = 1;
+
+ // A field in an API-specific (custom) Operation object which carries the same
+ // meaning as google.longrunning.Operation.done. If the annotated field is of
+ // an enum type, `annotated_field_name == EnumType.DONE` semantics should be
+ // equivalent to `Operation.done == true`. If the annotated field is of type
+ // boolean, then it should follow the same semantics as Operation.done.
+ // Otherwise, a non-empty value should be treated as `Operation.done == true`.
+ STATUS = 2;
+
+ // A field in an API-specific (custom) Operation object which carries the same
+ // meaning as google.longrunning.Operation.error.code.
+ ERROR_CODE = 3;
+
+ // A field in an API-specific (custom) Operation object which carries the same
+ // meaning as google.longrunning.Operation.error.message.
+ ERROR_MESSAGE = 4;
+}
\ No newline at end of file
diff --git a/Lib/site-packages/google/cloud/extended_operations_pb2.py b/Lib/site-packages/google/cloud/extended_operations_pb2.py
new file mode 100644
index 0000000..89e0c04
--- /dev/null
+++ b/Lib/site-packages/google/cloud/extended_operations_pb2.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/extended_operations.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b"\n&google/cloud/extended_operations.proto\x12\x0cgoogle.cloud\x1a google/protobuf/descriptor.proto*b\n\x18OperationResponseMapping\x12\r\n\tUNDEFINED\x10\x00\x12\x08\n\x04NAME\x10\x01\x12\n\n\x06STATUS\x10\x02\x12\x0e\n\nERROR_CODE\x10\x03\x12\x11\n\rERROR_MESSAGE\x10\x04:_\n\x0foperation_field\x12\x1d.google.protobuf.FieldOptions\x18\xfd\x08 \x01(\x0e\x32&.google.cloud.OperationResponseMapping:?\n\x17operation_request_field\x12\x1d.google.protobuf.FieldOptions\x18\xfe\x08 \x01(\t:@\n\x18operation_response_field\x12\x1d.google.protobuf.FieldOptions\x18\xff\x08 \x01(\t::\n\x11operation_service\x12\x1e.google.protobuf.MethodOptions\x18\xe1\t \x01(\t:A\n\x18operation_polling_method\x12\x1e.google.protobuf.MethodOptions\x18\xe2\t \x01(\x08\x42y\n\x10\x63om.google.cloudB\x17\x45xtendedOperationsProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/extendedops;extendedops\xa2\x02\x04GAPIb\x06proto3"
+)
+
+_OPERATIONRESPONSEMAPPING = DESCRIPTOR.enum_types_by_name["OperationResponseMapping"]
+OperationResponseMapping = enum_type_wrapper.EnumTypeWrapper(_OPERATIONRESPONSEMAPPING)
+UNDEFINED = 0
+NAME = 1
+STATUS = 2
+ERROR_CODE = 3
+ERROR_MESSAGE = 4
+
+OPERATION_FIELD_FIELD_NUMBER = 1149
+operation_field = DESCRIPTOR.extensions_by_name["operation_field"]
+OPERATION_REQUEST_FIELD_FIELD_NUMBER = 1150
+operation_request_field = DESCRIPTOR.extensions_by_name["operation_request_field"]
+OPERATION_RESPONSE_FIELD_FIELD_NUMBER = 1151
+operation_response_field = DESCRIPTOR.extensions_by_name["operation_response_field"]
+OPERATION_SERVICE_FIELD_NUMBER = 1249
+operation_service = DESCRIPTOR.extensions_by_name["operation_service"]
+OPERATION_POLLING_METHOD_FIELD_NUMBER = 1250
+operation_polling_method = DESCRIPTOR.extensions_by_name["operation_polling_method"]
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+ google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(
+ operation_field
+ )
+ google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(
+ operation_request_field
+ )
+ google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(
+ operation_response_field
+ )
+ google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(
+ operation_service
+ )
+ google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(
+ operation_polling_method
+ )
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\020com.google.cloudB\027ExtendedOperationsProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/extendedops;extendedops\242\002\004GAPI"
+ _OPERATIONRESPONSEMAPPING._serialized_start = 90
+ _OPERATIONRESPONSEMAPPING._serialized_end = 188
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/cloud/location/locations.proto b/Lib/site-packages/google/cloud/location/locations.proto
new file mode 100644
index 0000000..a91766c
--- /dev/null
+++ b/Lib/site-packages/google/cloud/location/locations.proto
@@ -0,0 +1,108 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.cloud.location;
+
+import "google/api/annotations.proto";
+import "google/protobuf/any.proto";
+import "google/api/client.proto";
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/cloud/location;location";
+option java_multiple_files = true;
+option java_outer_classname = "LocationsProto";
+option java_package = "com.google.cloud.location";
+
+// An abstract interface that provides location-related information for
+// a service. Service-specific metadata is provided through the
+// [Location.metadata][google.cloud.location.Location.metadata] field.
+service Locations {
+ option (google.api.default_host) = "cloud.googleapis.com";
+ option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform";
+
+ // Lists information about the supported locations for this service.
+ rpc ListLocations(ListLocationsRequest) returns (ListLocationsResponse) {
+ option (google.api.http) = {
+ get: "/v1/{name=locations}"
+ additional_bindings {
+ get: "/v1/{name=projects/*}/locations"
+ }
+ };
+ }
+
+ // Gets information about a location.
+ rpc GetLocation(GetLocationRequest) returns (Location) {
+ option (google.api.http) = {
+ get: "/v1/{name=locations/*}"
+ additional_bindings {
+ get: "/v1/{name=projects/*/locations/*}"
+ }
+ };
+ }
+}
+
+// The request message for [Locations.ListLocations][google.cloud.location.Locations.ListLocations].
+message ListLocationsRequest {
+ // The resource that owns the locations collection, if applicable.
+ string name = 1;
+
+ // The standard list filter.
+ string filter = 2;
+
+ // The standard list page size.
+ int32 page_size = 3;
+
+ // The standard list page token.
+ string page_token = 4;
+}
+
+// The response message for [Locations.ListLocations][google.cloud.location.Locations.ListLocations].
+message ListLocationsResponse {
+ // A list of locations that matches the specified filter in the request.
+ repeated Location locations = 1;
+
+ // The standard List next-page token.
+ string next_page_token = 2;
+}
+
+// The request message for [Locations.GetLocation][google.cloud.location.Locations.GetLocation].
+message GetLocationRequest {
+ // Resource name for the location.
+ string name = 1;
+}
+
+// A resource that represents a Google Cloud Platform location.
+message Location {
+ // Resource name for the location, which may vary between implementations.
+ // For example: `"projects/example-project/locations/us-east1"`
+ string name = 1;
+
+ // The canonical id for this location. For example: `"us-east1"`.
+ string location_id = 4;
+
+ // The friendly name for this location, typically a nearby city name.
+ // For example, "Tokyo".
+ string display_name = 5;
+
+ // Cross-service attributes for the location. For example
+ //
+ // {"cloud.googleapis.com/region": "us-east1"}
+  map<string, string> labels = 2;
+
+ // Service-specific metadata. For example the available capacity at the given
+ // location.
+ google.protobuf.Any metadata = 3;
+}
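+
+// Illustrative REST calls implied by the HTTP bindings above ("my-project"
+// and "us-east1" are placeholder identifiers):
+//
+//   GET /v1/projects/my-project/locations            (ListLocations)
+//   GET /v1/projects/my-project/locations/us-east1   (GetLocation)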
diff --git a/Lib/site-packages/google/cloud/location/locations_pb2.py b/Lib/site-packages/google/cloud/location/locations_pb2.py
new file mode 100644
index 0000000..96d8a49
--- /dev/null
+++ b/Lib/site-packages/google/cloud/location/locations_pb2.py
@@ -0,0 +1,129 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/location/locations.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
+from google.api import client_pb2 as google_dot_api_dot_client__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n%google/cloud/location/locations.proto\x12\x15google.cloud.location\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/protobuf/any.proto\x1a\x17google/api/client.proto"[\n\x14ListLocationsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"d\n\x15ListLocationsResponse\x12\x32\n\tlocations\x18\x01 \x03(\x0b\x32\x1f.google.cloud.location.Location\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t""\n\x12GetLocationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\xd7\x01\n\x08Location\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0blocation_id\x18\x04 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x05 \x01(\t\x12;\n\x06labels\x18\x02 \x03(\x0b\x32+.google.cloud.location.Location.LabelsEntry\x12&\n\x08metadata\x18\x03 \x01(\x0b\x32\x14.google.protobuf.Any\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x32\xa4\x03\n\tLocations\x12\xab\x01\n\rListLocations\x12+.google.cloud.location.ListLocationsRequest\x1a,.google.cloud.location.ListLocationsResponse"?\x82\xd3\xe4\x93\x02\x39\x12\x14/v1/{name=locations}Z!\x12\x1f/v1/{name=projects/*}/locations\x12\x9e\x01\n\x0bGetLocation\x12).google.cloud.location.GetLocationRequest\x1a\x1f.google.cloud.location.Location"C\x82\xd3\xe4\x93\x02=\x12\x16/v1/{name=locations/*}Z#\x12!/v1/{name=projects/*/locations/*}\x1aH\xca\x41\x14\x63loud.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBo\n\x19\x63om.google.cloud.locationB\x0eLocationsProtoP\x01Z=google.golang.org/genproto/googleapis/cloud/location;location\xf8\x01\x01\x62\x06proto3'
+)
+
+
+_LISTLOCATIONSREQUEST = DESCRIPTOR.message_types_by_name["ListLocationsRequest"]
+_LISTLOCATIONSRESPONSE = DESCRIPTOR.message_types_by_name["ListLocationsResponse"]
+_GETLOCATIONREQUEST = DESCRIPTOR.message_types_by_name["GetLocationRequest"]
+_LOCATION = DESCRIPTOR.message_types_by_name["Location"]
+_LOCATION_LABELSENTRY = _LOCATION.nested_types_by_name["LabelsEntry"]
+ListLocationsRequest = _reflection.GeneratedProtocolMessageType(
+ "ListLocationsRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTLOCATIONSREQUEST,
+ "__module__": "google.cloud.location.locations_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.location.ListLocationsRequest)
+ },
+)
+_sym_db.RegisterMessage(ListLocationsRequest)
+
+ListLocationsResponse = _reflection.GeneratedProtocolMessageType(
+ "ListLocationsResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTLOCATIONSRESPONSE,
+ "__module__": "google.cloud.location.locations_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.location.ListLocationsResponse)
+ },
+)
+_sym_db.RegisterMessage(ListLocationsResponse)
+
+GetLocationRequest = _reflection.GeneratedProtocolMessageType(
+ "GetLocationRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GETLOCATIONREQUEST,
+ "__module__": "google.cloud.location.locations_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.location.GetLocationRequest)
+ },
+)
+_sym_db.RegisterMessage(GetLocationRequest)
+
+Location = _reflection.GeneratedProtocolMessageType(
+ "Location",
+ (_message.Message,),
+ {
+ "LabelsEntry": _reflection.GeneratedProtocolMessageType(
+ "LabelsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LOCATION_LABELSENTRY,
+ "__module__": "google.cloud.location.locations_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.location.Location.LabelsEntry)
+ },
+ ),
+ "DESCRIPTOR": _LOCATION,
+ "__module__": "google.cloud.location.locations_pb2"
+ # @@protoc_insertion_point(class_scope:google.cloud.location.Location)
+ },
+)
+_sym_db.RegisterMessage(Location)
+_sym_db.RegisterMessage(Location.LabelsEntry)
+
+_LOCATIONS = DESCRIPTOR.services_by_name["Locations"]
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\031com.google.cloud.locationB\016LocationsProtoP\001Z=google.golang.org/genproto/googleapis/cloud/location;location\370\001\001"
+ _LOCATION_LABELSENTRY._options = None
+ _LOCATION_LABELSENTRY._serialized_options = b"8\001"
+ _LOCATIONS._options = None
+ _LOCATIONS._serialized_options = b"\312A\024cloud.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform"
+ _LOCATIONS.methods_by_name["ListLocations"]._options = None
+ _LOCATIONS.methods_by_name[
+ "ListLocations"
+ ]._serialized_options = b"\202\323\344\223\0029\022\024/v1/{name=locations}Z!\022\037/v1/{name=projects/*}/locations"
+ _LOCATIONS.methods_by_name["GetLocation"]._options = None
+ _LOCATIONS.methods_by_name[
+ "GetLocation"
+ ]._serialized_options = b"\202\323\344\223\002=\022\026/v1/{name=locations/*}Z#\022!/v1/{name=projects/*/locations/*}"
+ _LISTLOCATIONSREQUEST._serialized_start = 146
+ _LISTLOCATIONSREQUEST._serialized_end = 237
+ _LISTLOCATIONSRESPONSE._serialized_start = 239
+ _LISTLOCATIONSRESPONSE._serialized_end = 339
+ _GETLOCATIONREQUEST._serialized_start = 341
+ _GETLOCATIONREQUEST._serialized_end = 375
+ _LOCATION._serialized_start = 378
+ _LOCATION._serialized_end = 593
+ _LOCATION_LABELSENTRY._serialized_start = 548
+ _LOCATION_LABELSENTRY._serialized_end = 593
+ _LOCATIONS._serialized_start = 596
+ _LOCATIONS._serialized_end = 1016
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/gapic/metadata/gapic_metadata.proto b/Lib/site-packages/google/gapic/metadata/gapic_metadata.proto
new file mode 100644
index 0000000..16090ed
--- /dev/null
+++ b/Lib/site-packages/google/gapic/metadata/gapic_metadata.proto
@@ -0,0 +1,92 @@
+// Copyright 2020 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.gapic.metadata;
+
+option csharp_namespace = "Google.Gapic.Metadata";
+option go_package = "google.golang.org/genproto/googleapis/gapic/metadata;metadata";
+option java_multiple_files = true;
+option java_outer_classname = "GapicMetadataProto";
+option java_package = "com.google.gapic.metadata";
+option php_namespace = "Google\\Gapic\\Metadata";
+option ruby_package = "Google::Gapic::Metadata";
+
+// Metadata about a GAPIC library for a specific combination of API, version, and
+// computer language.
+message GapicMetadata {
+ // Schema version of this proto. Current value: 1.0
+ string schema = 1;
+
+ // Any human-readable comments to be included in this file.
+ string comment = 2;
+
+  // Computer language of this generated library. This must be
+  // spelled out as it is spoken in English, with no capitalization or
+ // separators (e.g. "csharp", "nodejs").
+ string language = 3;
+
+ // The proto package containing the API definition for which this
+ // GAPIC library was generated.
+ string proto_package = 4;
+
+ // The language-specific library package for this GAPIC library.
+ string library_package = 5;
+
+ // A map from each proto-defined service to ServiceForTransports,
+ // which allows listing information about transport-specific
+ // implementations of the service.
+ //
+ // The key is the name of the service as it appears in the .proto
+ // file.
+  map<string, ServiceForTransport> services = 6;
+
+ // A map from a transport name to ServiceAsClient, which allows
+ // listing information about the client objects that implement the
+ // parent RPC service for the specified transport.
+ //
+ // The key name is the transport, lower-cased with no separators
+ // (e.g. "grpc", "rest").
+ message ServiceForTransport {
+    map<string, ServiceAsClient> clients = 1;
+ }
+
+ // Information about a specific client implementing a proto-defined service.
+ message ServiceAsClient {
+ // The name of the library client formatted as it appears in the source code
+ string library_client = 1;
+
+    // A mapping from each proto-defined RPC name to the list of
+ // methods in library_client that implement it. There can be more
+ // than one library_client method for each RPC. RPCs with no
+ // library_client methods need not be included.
+ //
+    // The key name is the name of the RPC as defined and formatted in
+ // the proto file.
+    map<string, MethodList> rpcs = 2;
+ }
+
+ // List of GAPIC client methods implementing the proto-defined RPC
+ // for the transport and service specified in the containing
+ // structures.
+ message MethodList {
+ // List of methods for a specific proto-service client in the
+ // GAPIC. These names should be formatted as they appear in the
+ // source code.
+ repeated string methods = 1;
+ }
+
+}
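+
+// An illustrative (hypothetical) gapic_metadata.json instance of the schema
+// above, using proto3 JSON field names; the service, RPC, and method names
+// are invented for the example:
+//
+//   {
+//     "schema": "1.0",
+//     "language": "python",
+//     "protoPackage": "google.example.v1",
+//     "libraryPackage": "google.example_v1",
+//     "services": {
+//       "ExampleService": {
+//         "clients": {
+//           "grpc": {
+//             "libraryClient": "ExampleServiceClient",
+//             "rpcs": {
+//               "GetExample": { "methods": ["get_example"] }
+//             }
+//           }
+//         }
+//       }
+//     }
+//   }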
diff --git a/Lib/site-packages/google/gapic/metadata/gapic_metadata_pb2.py b/Lib/site-packages/google/gapic/metadata/gapic_metadata_pb2.py
new file mode 100644
index 0000000..d70605c
--- /dev/null
+++ b/Lib/site-packages/google/gapic/metadata/gapic_metadata_pb2.py
@@ -0,0 +1,144 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/gapic/metadata/gapic_metadata.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n*google/gapic/metadata/gapic_metadata.proto\x12\x15google.gapic.metadata"\xf0\x05\n\rGapicMetadata\x12\x0e\n\x06schema\x18\x01 \x01(\t\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12\x10\n\x08language\x18\x03 \x01(\t\x12\x15\n\rproto_package\x18\x04 \x01(\t\x12\x17\n\x0flibrary_package\x18\x05 \x01(\t\x12\x44\n\x08services\x18\x06 \x03(\x0b\x32\x32.google.gapic.metadata.GapicMetadata.ServicesEntry\x1ai\n\rServicesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12G\n\x05value\x18\x02 \x01(\x0b\x32\x38.google.gapic.metadata.GapicMetadata.ServiceForTransport:\x02\x38\x01\x1a\xd3\x01\n\x13ServiceForTransport\x12V\n\x07\x63lients\x18\x01 \x03(\x0b\x32\x45.google.gapic.metadata.GapicMetadata.ServiceForTransport.ClientsEntry\x1a\x64\n\x0c\x43lientsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x43\n\x05value\x18\x02 \x01(\x0b\x32\x34.google.gapic.metadata.GapicMetadata.ServiceAsClient:\x02\x38\x01\x1a\xd5\x01\n\x0fServiceAsClient\x12\x16\n\x0elibrary_client\x18\x01 \x01(\t\x12L\n\x04rpcs\x18\x02 \x03(\x0b\x32>.google.gapic.metadata.GapicMetadata.ServiceAsClient.RpcsEntry\x1a\\\n\tRpcsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12>\n\x05value\x18\x02 \x01(\x0b\x32/.google.gapic.metadata.GapicMetadata.MethodList:\x02\x38\x01\x1a\x1d\n\nMethodList\x12\x0f\n\x07methods\x18\x01 \x03(\tB\xba\x01\n\x19\x63om.google.gapic.metadataB\x12GapicMetadataProtoP\x01Z=google.golang.org/genproto/googleapis/gapic/metadata;metadata\xaa\x02\x15Google.Gapic.Metadata\xca\x02\x15Google\\Gapic\\Metadata\xea\x02\x17Google::Gapic::Metadatab\x06proto3'
+)
+
+
+_GAPICMETADATA = DESCRIPTOR.message_types_by_name["GapicMetadata"]
+_GAPICMETADATA_SERVICESENTRY = _GAPICMETADATA.nested_types_by_name["ServicesEntry"]
+_GAPICMETADATA_SERVICEFORTRANSPORT = _GAPICMETADATA.nested_types_by_name[
+ "ServiceForTransport"
+]
+_GAPICMETADATA_SERVICEFORTRANSPORT_CLIENTSENTRY = (
+ _GAPICMETADATA_SERVICEFORTRANSPORT.nested_types_by_name["ClientsEntry"]
+)
+_GAPICMETADATA_SERVICEASCLIENT = _GAPICMETADATA.nested_types_by_name["ServiceAsClient"]
+_GAPICMETADATA_SERVICEASCLIENT_RPCSENTRY = (
+ _GAPICMETADATA_SERVICEASCLIENT.nested_types_by_name["RpcsEntry"]
+)
+_GAPICMETADATA_METHODLIST = _GAPICMETADATA.nested_types_by_name["MethodList"]
+GapicMetadata = _reflection.GeneratedProtocolMessageType(
+ "GapicMetadata",
+ (_message.Message,),
+ {
+ "ServicesEntry": _reflection.GeneratedProtocolMessageType(
+ "ServicesEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GAPICMETADATA_SERVICESENTRY,
+ "__module__": "google.gapic.metadata.gapic_metadata_pb2"
+ # @@protoc_insertion_point(class_scope:google.gapic.metadata.GapicMetadata.ServicesEntry)
+ },
+ ),
+ "ServiceForTransport": _reflection.GeneratedProtocolMessageType(
+ "ServiceForTransport",
+ (_message.Message,),
+ {
+ "ClientsEntry": _reflection.GeneratedProtocolMessageType(
+ "ClientsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GAPICMETADATA_SERVICEFORTRANSPORT_CLIENTSENTRY,
+ "__module__": "google.gapic.metadata.gapic_metadata_pb2"
+ # @@protoc_insertion_point(class_scope:google.gapic.metadata.GapicMetadata.ServiceForTransport.ClientsEntry)
+ },
+ ),
+ "DESCRIPTOR": _GAPICMETADATA_SERVICEFORTRANSPORT,
+ "__module__": "google.gapic.metadata.gapic_metadata_pb2"
+ # @@protoc_insertion_point(class_scope:google.gapic.metadata.GapicMetadata.ServiceForTransport)
+ },
+ ),
+ "ServiceAsClient": _reflection.GeneratedProtocolMessageType(
+ "ServiceAsClient",
+ (_message.Message,),
+ {
+ "RpcsEntry": _reflection.GeneratedProtocolMessageType(
+ "RpcsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GAPICMETADATA_SERVICEASCLIENT_RPCSENTRY,
+ "__module__": "google.gapic.metadata.gapic_metadata_pb2"
+ # @@protoc_insertion_point(class_scope:google.gapic.metadata.GapicMetadata.ServiceAsClient.RpcsEntry)
+ },
+ ),
+ "DESCRIPTOR": _GAPICMETADATA_SERVICEASCLIENT,
+ "__module__": "google.gapic.metadata.gapic_metadata_pb2"
+ # @@protoc_insertion_point(class_scope:google.gapic.metadata.GapicMetadata.ServiceAsClient)
+ },
+ ),
+ "MethodList": _reflection.GeneratedProtocolMessageType(
+ "MethodList",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GAPICMETADATA_METHODLIST,
+ "__module__": "google.gapic.metadata.gapic_metadata_pb2"
+ # @@protoc_insertion_point(class_scope:google.gapic.metadata.GapicMetadata.MethodList)
+ },
+ ),
+ "DESCRIPTOR": _GAPICMETADATA,
+ "__module__": "google.gapic.metadata.gapic_metadata_pb2"
+ # @@protoc_insertion_point(class_scope:google.gapic.metadata.GapicMetadata)
+ },
+)
+_sym_db.RegisterMessage(GapicMetadata)
+_sym_db.RegisterMessage(GapicMetadata.ServicesEntry)
+_sym_db.RegisterMessage(GapicMetadata.ServiceForTransport)
+_sym_db.RegisterMessage(GapicMetadata.ServiceForTransport.ClientsEntry)
+_sym_db.RegisterMessage(GapicMetadata.ServiceAsClient)
+_sym_db.RegisterMessage(GapicMetadata.ServiceAsClient.RpcsEntry)
+_sym_db.RegisterMessage(GapicMetadata.MethodList)
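+
+# Illustrative construction of the message (a sketch, not part of the
+# generated file; the service and client names below are hypothetical):
+#
+#     md = GapicMetadata(schema="1.0", language="python")
+#     md.services["Echo"].clients["grpc"].library_client = "EchoClient"
+#     md.services["Echo"].clients["grpc"].rpcs["Echo"].methods.append("echo")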
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\031com.google.gapic.metadataB\022GapicMetadataProtoP\001Z=google.golang.org/genproto/googleapis/gapic/metadata;metadata\252\002\025Google.Gapic.Metadata\312\002\025Google\\Gapic\\Metadata\352\002\027Google::Gapic::Metadata"
+ _GAPICMETADATA_SERVICESENTRY._options = None
+ _GAPICMETADATA_SERVICESENTRY._serialized_options = b"8\001"
+ _GAPICMETADATA_SERVICEFORTRANSPORT_CLIENTSENTRY._options = None
+ _GAPICMETADATA_SERVICEFORTRANSPORT_CLIENTSENTRY._serialized_options = b"8\001"
+ _GAPICMETADATA_SERVICEASCLIENT_RPCSENTRY._options = None
+ _GAPICMETADATA_SERVICEASCLIENT_RPCSENTRY._serialized_options = b"8\001"
+ _GAPICMETADATA._serialized_start = 70
+ _GAPICMETADATA._serialized_end = 822
+ _GAPICMETADATA_SERVICESENTRY._serialized_start = 256
+ _GAPICMETADATA_SERVICESENTRY._serialized_end = 361
+ _GAPICMETADATA_SERVICEFORTRANSPORT._serialized_start = 364
+ _GAPICMETADATA_SERVICEFORTRANSPORT._serialized_end = 575
+ _GAPICMETADATA_SERVICEFORTRANSPORT_CLIENTSENTRY._serialized_start = 475
+ _GAPICMETADATA_SERVICEFORTRANSPORT_CLIENTSENTRY._serialized_end = 575
+ _GAPICMETADATA_SERVICEASCLIENT._serialized_start = 578
+ _GAPICMETADATA_SERVICEASCLIENT._serialized_end = 791
+ _GAPICMETADATA_SERVICEASCLIENT_RPCSENTRY._serialized_start = 699
+ _GAPICMETADATA_SERVICEASCLIENT_RPCSENTRY._serialized_end = 791
+ _GAPICMETADATA_METHODLIST._serialized_start = 793
+ _GAPICMETADATA_METHODLIST._serialized_end = 822
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/logging/type/http_request.proto b/Lib/site-packages/google/logging/type/http_request.proto
new file mode 100644
index 0000000..425a09d
--- /dev/null
+++ b/Lib/site-packages/google/logging/type/http_request.proto
@@ -0,0 +1,95 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.logging.type;
+
+import "google/protobuf/duration.proto";
+
+option csharp_namespace = "Google.Cloud.Logging.Type";
+option go_package = "google.golang.org/genproto/googleapis/logging/type;ltype";
+option java_multiple_files = true;
+option java_outer_classname = "HttpRequestProto";
+option java_package = "com.google.logging.type";
+option php_namespace = "Google\\Cloud\\Logging\\Type";
+option ruby_package = "Google::Cloud::Logging::Type";
+
+// A common proto for logging HTTP requests. Only contains semantics
+// defined by the HTTP specification. Product-specific logging
+// information MUST be defined in a separate message.
+message HttpRequest {
+ // The request method. Examples: `"GET"`, `"HEAD"`, `"PUT"`, `"POST"`.
+ string request_method = 1;
+
+ // The scheme (http, https), the host name, the path and the query
+ // portion of the URL that was requested.
+ // Example: `"http://example.com/some/info?color=red"`.
+ string request_url = 2;
+
+ // The size of the HTTP request message in bytes, including the request
+ // headers and the request body.
+ int64 request_size = 3;
+
+  // The response code indicating the status of the response.
+ // Examples: 200, 404.
+ int32 status = 4;
+
+ // The size of the HTTP response message sent back to the client, in bytes,
+ // including the response headers and the response body.
+ int64 response_size = 5;
+
+ // The user agent sent by the client. Example:
+ // `"Mozilla/4.0 (compatible; MSIE 6.0; Windows 98; Q312461; .NET
+ // CLR 1.0.3705)"`.
+ string user_agent = 6;
+
+ // The IP address (IPv4 or IPv6) of the client that issued the HTTP
+ // request. This field can include port information. Examples:
+ // `"192.168.1.1"`, `"10.0.0.1:80"`, `"FE80::0202:B3FF:FE1E:8329"`.
+ string remote_ip = 7;
+
+ // The IP address (IPv4 or IPv6) of the origin server that the request was
+ // sent to. This field can include port information. Examples:
+ // `"192.168.1.1"`, `"10.0.0.1:80"`, `"FE80::0202:B3FF:FE1E:8329"`.
+ string server_ip = 13;
+
+ // The referer URL of the request, as defined in
+ // [HTTP/1.1 Header Field
+ // Definitions](https://datatracker.ietf.org/doc/html/rfc2616#section-14.36).
+ string referer = 8;
+
+ // The request processing latency on the server, from the time the request was
+ // received until the response was sent.
+ google.protobuf.Duration latency = 14;
+
+ // Whether or not a cache lookup was attempted.
+ bool cache_lookup = 11;
+
+ // Whether or not an entity was served from cache
+ // (with or without validation).
+ bool cache_hit = 9;
+
+ // Whether or not the response was validated with the origin server before
+ // being served from cache. This field is only meaningful if `cache_hit` is
+ // True.
+ bool cache_validated_with_origin_server = 10;
+
+ // The number of HTTP response bytes inserted into cache. Set only when a
+ // cache fill was attempted.
+ int64 cache_fill_bytes = 12;
+
+ // Protocol used for the request. Examples: "HTTP/1.1", "HTTP/2", "websocket"
+ string protocol = 15;
+}
diff --git a/Lib/site-packages/google/logging/type/http_request_pb2.py b/Lib/site-packages/google/logging/type/http_request_pb2.py
new file mode 100644
index 0000000..6777b3c
--- /dev/null
+++ b/Lib/site-packages/google/logging/type/http_request_pb2.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/logging/type/http_request.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n&google/logging/type/http_request.proto\x12\x13google.logging.type\x1a\x1egoogle/protobuf/duration.proto"\xef\x02\n\x0bHttpRequest\x12\x16\n\x0erequest_method\x18\x01 \x01(\t\x12\x13\n\x0brequest_url\x18\x02 \x01(\t\x12\x14\n\x0crequest_size\x18\x03 \x01(\x03\x12\x0e\n\x06status\x18\x04 \x01(\x05\x12\x15\n\rresponse_size\x18\x05 \x01(\x03\x12\x12\n\nuser_agent\x18\x06 \x01(\t\x12\x11\n\tremote_ip\x18\x07 \x01(\t\x12\x11\n\tserver_ip\x18\r \x01(\t\x12\x0f\n\x07referer\x18\x08 \x01(\t\x12*\n\x07latency\x18\x0e \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x14\n\x0c\x63\x61\x63he_lookup\x18\x0b \x01(\x08\x12\x11\n\tcache_hit\x18\t \x01(\x08\x12*\n"cache_validated_with_origin_server\x18\n \x01(\x08\x12\x18\n\x10\x63\x61\x63he_fill_bytes\x18\x0c \x01(\x03\x12\x10\n\x08protocol\x18\x0f \x01(\tB\xbe\x01\n\x17\x63om.google.logging.typeB\x10HttpRequestProtoP\x01Z8google.golang.org/genproto/googleapis/logging/type;ltype\xaa\x02\x19Google.Cloud.Logging.Type\xca\x02\x19Google\\Cloud\\Logging\\Type\xea\x02\x1cGoogle::Cloud::Logging::Typeb\x06proto3'
+)
+
+
+_HTTPREQUEST = DESCRIPTOR.message_types_by_name["HttpRequest"]
+HttpRequest = _reflection.GeneratedProtocolMessageType(
+ "HttpRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _HTTPREQUEST,
+ "__module__": "google.logging.type.http_request_pb2"
+ # @@protoc_insertion_point(class_scope:google.logging.type.HttpRequest)
+ },
+)
+_sym_db.RegisterMessage(HttpRequest)
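+
+# Illustrative round-trip (a sketch, not part of the generated file; the
+# field values are assumptions):
+#
+#     req = HttpRequest(request_method="GET", status=200, cache_hit=True)
+#     data = req.SerializeToString()
+#     assert HttpRequest.FromString(data).status == 200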
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\027com.google.logging.typeB\020HttpRequestProtoP\001Z8google.golang.org/genproto/googleapis/logging/type;ltype\252\002\031Google.Cloud.Logging.Type\312\002\031Google\\Cloud\\Logging\\Type\352\002\034Google::Cloud::Logging::Type"
+ _HTTPREQUEST._serialized_start = 96
+ _HTTPREQUEST._serialized_end = 463
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/logging/type/log_severity.proto b/Lib/site-packages/google/logging/type/log_severity.proto
new file mode 100644
index 0000000..6740125
--- /dev/null
+++ b/Lib/site-packages/google/logging/type/log_severity.proto
@@ -0,0 +1,71 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.logging.type;
+
+option csharp_namespace = "Google.Cloud.Logging.Type";
+option go_package = "google.golang.org/genproto/googleapis/logging/type;ltype";
+option java_multiple_files = true;
+option java_outer_classname = "LogSeverityProto";
+option java_package = "com.google.logging.type";
+option objc_class_prefix = "GLOG";
+option php_namespace = "Google\\Cloud\\Logging\\Type";
+option ruby_package = "Google::Cloud::Logging::Type";
+
+// The severity of the event described in a log entry, expressed as one of the
+// standard severity levels listed below. For your reference, the levels are
+// assigned the listed numeric values. The effect of using numeric values other
+// than those listed is undefined.
+//
+// You can filter for log entries by severity. For example, the following
+// filter expression will match log entries with severities `INFO`, `NOTICE`,
+// and `WARNING`:
+//
+// severity > DEBUG AND severity <= WARNING
+//
+// If you are writing log entries, you should map other severity encodings to
+// one of these standard levels. For example, you might map all of Java's FINE,
+// FINER, and FINEST levels to `LogSeverity.DEBUG`. You can preserve the
+// original severity level in the log entry payload if you wish.
+enum LogSeverity {
+ // (0) The log entry has no assigned severity level.
+ DEFAULT = 0;
+
+ // (100) Debug or trace information.
+ DEBUG = 100;
+
+ // (200) Routine information, such as ongoing status or performance.
+ INFO = 200;
+
+ // (300) Normal but significant events, such as start up, shut down, or
+ // a configuration change.
+ NOTICE = 300;
+
+ // (400) Warning events might cause problems.
+ WARNING = 400;
+
+ // (500) Error events are likely to cause problems.
+ ERROR = 500;
+
+ // (600) Critical events cause more severe problems or outages.
+ CRITICAL = 600;
+
+ // (700) A person must take an action immediately.
+ ALERT = 700;
+
+ // (800) One or more systems are unusable.
+ EMERGENCY = 800;
+}
diff --git a/Lib/site-packages/google/logging/type/log_severity_pb2.py b/Lib/site-packages/google/logging/type/log_severity_pb2.py
new file mode 100644
index 0000000..7d3040c
--- /dev/null
+++ b/Lib/site-packages/google/logging/type/log_severity_pb2.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/logging/type/log_severity.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b"\n&google/logging/type/log_severity.proto\x12\x13google.logging.type*\x82\x01\n\x0bLogSeverity\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x44\x45\x42UG\x10\x64\x12\t\n\x04INFO\x10\xc8\x01\x12\x0b\n\x06NOTICE\x10\xac\x02\x12\x0c\n\x07WARNING\x10\x90\x03\x12\n\n\x05\x45RROR\x10\xf4\x03\x12\r\n\x08\x43RITICAL\x10\xd8\x04\x12\n\n\x05\x41LERT\x10\xbc\x05\x12\x0e\n\tEMERGENCY\x10\xa0\x06\x42\xc5\x01\n\x17\x63om.google.logging.typeB\x10LogSeverityProtoP\x01Z8google.golang.org/genproto/googleapis/logging/type;ltype\xa2\x02\x04GLOG\xaa\x02\x19Google.Cloud.Logging.Type\xca\x02\x19Google\\Cloud\\Logging\\Type\xea\x02\x1cGoogle::Cloud::Logging::Typeb\x06proto3"
+)
+
+_LOGSEVERITY = DESCRIPTOR.enum_types_by_name["LogSeverity"]
+LogSeverity = enum_type_wrapper.EnumTypeWrapper(_LOGSEVERITY)
+DEFAULT = 0
+DEBUG = 100
+INFO = 200
+NOTICE = 300
+WARNING = 400
+ERROR = 500
+CRITICAL = 600
+ALERT = 700
+EMERGENCY = 800
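+
+# Illustrative lookups via the enum wrapper (a sketch, not part of the
+# generated file):
+#
+#     LogSeverity.Name(200)       # "INFO"
+#     LogSeverity.Value("ERROR")  # 500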
+
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\027com.google.logging.typeB\020LogSeverityProtoP\001Z8google.golang.org/genproto/googleapis/logging/type;ltype\242\002\004GLOG\252\002\031Google.Cloud.Logging.Type\312\002\031Google\\Cloud\\Logging\\Type\352\002\034Google::Cloud::Logging::Type"
+ _LOGSEVERITY._serialized_start = 64
+ _LOGSEVERITY._serialized_end = 194
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/longrunning/operations.proto b/Lib/site-packages/google/longrunning/operations.proto
new file mode 100644
index 0000000..c8fda20
--- /dev/null
+++ b/Lib/site-packages/google/longrunning/operations.proto
@@ -0,0 +1,247 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.longrunning;
+
+import "google/api/annotations.proto";
+import "google/api/client.proto";
+import "google/protobuf/any.proto";
+import "google/protobuf/duration.proto";
+import "google/protobuf/empty.proto";
+import "google/rpc/status.proto";
+import "google/protobuf/descriptor.proto";
+
+option cc_enable_arenas = true;
+option csharp_namespace = "Google.LongRunning";
+option go_package = "cloud.google.com/go/longrunning/autogen/longrunningpb;longrunningpb";
+option java_multiple_files = true;
+option java_outer_classname = "OperationsProto";
+option java_package = "com.google.longrunning";
+option php_namespace = "Google\\LongRunning";
+
+extend google.protobuf.MethodOptions {
+ // Additional information regarding long-running operations.
+ // In particular, this specifies the types that are returned from
+ // long-running operations.
+ //
+ // Required for methods that return `google.longrunning.Operation`; invalid
+ // otherwise.
+ google.longrunning.OperationInfo operation_info = 1049;
+}
+
+// Manages long-running operations with an API service.
+//
+// When an API method normally takes a long time to complete, it can be designed
+// to return [Operation][google.longrunning.Operation] to the client, and the client can use this
+// interface to receive the real response asynchronously by polling the
+// operation resource, or pass the operation resource to another API (such as
+// Google Cloud Pub/Sub API) to receive the response. Any API service that
+// returns long-running operations should implement the `Operations` interface
+// so developers can have a consistent client experience.
+service Operations {
+ option (google.api.default_host) = "longrunning.googleapis.com";
+
+ // Lists operations that match the specified filter in the request. If the
+ // server doesn't support this method, it returns `UNIMPLEMENTED`.
+ //
+ // NOTE: the `name` binding allows API services to override the binding
+ // to use different resource name schemes, such as `users/*/operations`. To
+ // override the binding, API services can add a binding such as
+ // `"/v1/{name=users/*}/operations"` to their service configuration.
+ // For backwards compatibility, the default name includes the operations
+  // collection id; however, overriding users must ensure the name binding
+ // is the parent resource, without the operations collection id.
+ rpc ListOperations(ListOperationsRequest) returns (ListOperationsResponse) {
+ option (google.api.http) = {
+ get: "/v1/{name=operations}"
+ };
+ option (google.api.method_signature) = "name,filter";
+ }
+
+ // Gets the latest state of a long-running operation. Clients can use this
+ // method to poll the operation result at intervals as recommended by the API
+ // service.
+ rpc GetOperation(GetOperationRequest) returns (Operation) {
+ option (google.api.http) = {
+ get: "/v1/{name=operations/**}"
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Deletes a long-running operation. This method indicates that the client is
+ // no longer interested in the operation result. It does not cancel the
+ // operation. If the server doesn't support this method, it returns
+ // `google.rpc.Code.UNIMPLEMENTED`.
+ rpc DeleteOperation(DeleteOperationRequest) returns (google.protobuf.Empty) {
+ option (google.api.http) = {
+ delete: "/v1/{name=operations/**}"
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Starts asynchronous cancellation on a long-running operation. The server
+ // makes a best effort to cancel the operation, but success is not
+ // guaranteed. If the server doesn't support this method, it returns
+ // `google.rpc.Code.UNIMPLEMENTED`. Clients can use
+ // [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
+ // other methods to check whether the cancellation succeeded or whether the
+ // operation completed despite cancellation. On successful cancellation,
+ // the operation is not deleted; instead, it becomes an operation with
+ // an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
+ // corresponding to `Code.CANCELLED`.
+ rpc CancelOperation(CancelOperationRequest) returns (google.protobuf.Empty) {
+ option (google.api.http) = {
+ post: "/v1/{name=operations/**}:cancel"
+ body: "*"
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+  // Waits until the specified long-running operation is done or until at most
+  // a specified timeout has elapsed, returning the latest state. If the operation is
+ // already done, the latest state is immediately returned. If the timeout
+ // specified is greater than the default HTTP/RPC timeout, the HTTP/RPC
+ // timeout is used. If the server does not support this method, it returns
+ // `google.rpc.Code.UNIMPLEMENTED`.
+ // Note that this method is on a best-effort basis. It may return the latest
+ // state before the specified timeout (including immediately), meaning even an
+ // immediate response is no guarantee that the operation is done.
+ rpc WaitOperation(WaitOperationRequest) returns (Operation) {
+ }
+}
+
+// This resource represents a long-running operation that is the result of a
+// network API call.
+message Operation {
+ // The server-assigned name, which is only unique within the same service that
+ // originally returns it. If you use the default HTTP mapping, the
+ // `name` should be a resource name ending with `operations/{unique_id}`.
+ string name = 1;
+
+ // Service-specific metadata associated with the operation. It typically
+ // contains progress information and common metadata such as create time.
+ // Some services might not provide such metadata. Any method that returns a
+ // long-running operation should document the metadata type, if any.
+ google.protobuf.Any metadata = 2;
+
+ // If the value is `false`, it means the operation is still in progress.
+ // If `true`, the operation is completed, and either `error` or `response` is
+ // available.
+ bool done = 3;
+
+ // The operation result, which can be either an `error` or a valid `response`.
+ // If `done` == `false`, neither `error` nor `response` is set.
+ // If `done` == `true`, exactly one of `error` or `response` is set.
+ oneof result {
+ // The error result of the operation in case of failure or cancellation.
+ google.rpc.Status error = 4;
+
+ // The normal response of the operation in case of success. If the original
+ // method returns no data on success, such as `Delete`, the response is
+ // `google.protobuf.Empty`. If the original method is standard
+ // `Get`/`Create`/`Update`, the response should be the resource. For other
+ // methods, the response should have the type `XxxResponse`, where `Xxx`
+ // is the original method name. For example, if the original method name
+ // is `TakeSnapshot()`, the inferred response type is
+ // `TakeSnapshotResponse`.
+ google.protobuf.Any response = 5;
+ }
+}
+
+// The request message for [Operations.GetOperation][google.longrunning.Operations.GetOperation].
+message GetOperationRequest {
+ // The name of the operation resource.
+ string name = 1;
+}
+
+// The request message for [Operations.ListOperations][google.longrunning.Operations.ListOperations].
+message ListOperationsRequest {
+ // The name of the operation's parent resource.
+ string name = 4;
+
+ // The standard list filter.
+ string filter = 1;
+
+ // The standard list page size.
+ int32 page_size = 2;
+
+ // The standard list page token.
+ string page_token = 3;
+}
+
+// The response message for [Operations.ListOperations][google.longrunning.Operations.ListOperations].
+message ListOperationsResponse {
+ // A list of operations that matches the specified filter in the request.
+ repeated Operation operations = 1;
+
+ // The standard List next-page token.
+ string next_page_token = 2;
+}
+
+// The request message for [Operations.CancelOperation][google.longrunning.Operations.CancelOperation].
+message CancelOperationRequest {
+ // The name of the operation resource to be cancelled.
+ string name = 1;
+}
+
+// The request message for [Operations.DeleteOperation][google.longrunning.Operations.DeleteOperation].
+message DeleteOperationRequest {
+ // The name of the operation resource to be deleted.
+ string name = 1;
+}
+
+// The request message for [Operations.WaitOperation][google.longrunning.Operations.WaitOperation].
+message WaitOperationRequest {
+ // The name of the operation resource to wait on.
+ string name = 1;
+
+ // The maximum duration to wait before timing out. If left blank, the wait
+ // will be at most the time permitted by the underlying HTTP/RPC protocol.
+ // If RPC context deadline is also specified, the shorter one will be used.
+ google.protobuf.Duration timeout = 2;
+}
+
+// A message representing the message types used by a long-running operation.
+//
+// Example:
+//
+// rpc LongRunningRecognize(LongRunningRecognizeRequest)
+// returns (google.longrunning.Operation) {
+// option (google.longrunning.operation_info) = {
+// response_type: "LongRunningRecognizeResponse"
+// metadata_type: "LongRunningRecognizeMetadata"
+// };
+// }
+message OperationInfo {
+ // Required. The message name of the primary return type for this
+ // long-running operation.
+ // This type will be used to deserialize the LRO's response.
+ //
+ // If the response is in a different package from the rpc, a fully-qualified
+ // message name must be used (e.g. `google.protobuf.Struct`).
+ //
+ // Note: Altering this value constitutes a breaking change.
+ string response_type = 1;
+
+ // Required. The message name of the metadata type for this long-running
+ // operation.
+ //
+ // If the response is in a different package from the rpc, a fully-qualified
+ // message name must be used (e.g. `google.protobuf.Struct`).
+ //
+ // Note: Altering this value constitutes a breaking change.
+ string metadata_type = 2;
+}
diff --git a/Lib/site-packages/google/longrunning/operations_grpc.py b/Lib/site-packages/google/longrunning/operations_grpc.py
new file mode 100644
index 0000000..9089240
--- /dev/null
+++ b/Lib/site-packages/google/longrunning/operations_grpc.py
@@ -0,0 +1,19 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module is provided for backwards compatibility with
+# googleapis-common-protos <= 1.5.0, where this was the import path.
+
+from __future__ import absolute_import
+from google.longrunning.operations_grpc_pb2 import *
diff --git a/Lib/site-packages/google/longrunning/operations_grpc_pb2.py b/Lib/site-packages/google/longrunning/operations_grpc_pb2.py
new file mode 100644
index 0000000..e03cbeb
--- /dev/null
+++ b/Lib/site-packages/google/longrunning/operations_grpc_pb2.py
@@ -0,0 +1,15 @@
+# This module is provided for backwards compatibility with
+# googleapis-common-protos <= 1.52.0, where this import path contained
+# all of the message and gRPC definitions.
+
+from google.longrunning.operations_proto_pb2 import *
+from google.longrunning.operations_proto_pb2 import _OPERATION
+from google.longrunning.operations_proto_pb2 import _GETOPERATIONREQUEST
+from google.longrunning.operations_proto_pb2 import _LISTOPERATIONSREQUEST
+from google.longrunning.operations_proto_pb2 import _LISTOPERATIONSRESPONSE
+from google.longrunning.operations_proto_pb2 import _CANCELOPERATIONREQUEST
+from google.longrunning.operations_proto_pb2 import _DELETEOPERATIONREQUEST
+from google.longrunning.operations_proto_pb2 import _OPERATIONINFO
+from google.longrunning.operations_proto_pb2 import _OPERATIONS
+from google.longrunning.operations_pb2_grpc import *
diff --git a/Lib/site-packages/google/longrunning/operations_pb2.py b/Lib/site-packages/google/longrunning/operations_pb2.py
new file mode 100644
index 0000000..bcd9ed5
--- /dev/null
+++ b/Lib/site-packages/google/longrunning/operations_pb2.py
@@ -0,0 +1,42 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Safe implementation of long-running operations with and without gRPC.
+
+Multiplexes between versions of long-running operations with and without gRPC.
+The former is preferred, but not possible in all environments (such as Google
+AppEngine Standard).
+"""
+
+try:
+ from google.longrunning.operations_grpc_pb2 import *
+ from google.longrunning.operations_grpc_pb2 import _OPERATION
+ from google.longrunning.operations_grpc_pb2 import _GETOPERATIONREQUEST
+ from google.longrunning.operations_grpc_pb2 import _LISTOPERATIONSREQUEST
+ from google.longrunning.operations_grpc_pb2 import _LISTOPERATIONSRESPONSE
+ from google.longrunning.operations_grpc_pb2 import _CANCELOPERATIONREQUEST
+ from google.longrunning.operations_grpc_pb2 import _DELETEOPERATIONREQUEST
+ from google.longrunning.operations_grpc_pb2 import _OPERATIONINFO
+ from google.longrunning.operations_grpc_pb2 import _OPERATIONS
+except ImportError:
+ from google.longrunning.operations_proto_pb2 import *
+ from google.longrunning.operations_proto_pb2 import _OPERATION
+ from google.longrunning.operations_proto_pb2 import _GETOPERATIONREQUEST
+ from google.longrunning.operations_proto_pb2 import _LISTOPERATIONSREQUEST
+ from google.longrunning.operations_proto_pb2 import _LISTOPERATIONSRESPONSE
+ from google.longrunning.operations_proto_pb2 import _CANCELOPERATIONREQUEST
+ from google.longrunning.operations_proto_pb2 import _DELETEOPERATIONREQUEST
+ from google.longrunning.operations_proto_pb2 import _OPERATIONINFO
+ from google.longrunning.operations_proto_pb2 import _OPERATIONS
diff --git a/Lib/site-packages/google/longrunning/operations_pb2_grpc.py b/Lib/site-packages/google/longrunning/operations_pb2_grpc.py
new file mode 100644
index 0000000..d840270
--- /dev/null
+++ b/Lib/site-packages/google/longrunning/operations_pb2_grpc.py
@@ -0,0 +1,341 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+from google.longrunning import (
+ operations_proto_pb2 as google_dot_longrunning_dot_operations__pb2,
+)
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+
+
+class OperationsStub(object):
+ """Manages long-running operations with an API service.
+
+    When an API method normally takes a long time to complete, it can be designed
+ to return [Operation][google.longrunning.Operation] to the client, and the client can use this
+ interface to receive the real response asynchronously by polling the
+ operation resource, or pass the operation resource to another API (such as
+ Google Cloud Pub/Sub API) to receive the response. Any API service that
+ returns long-running operations should implement the `Operations` interface
+ so developers can have a consistent client experience.
+ """
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.ListOperations = channel.unary_unary(
+ "/google.longrunning.Operations/ListOperations",
+ request_serializer=google_dot_longrunning_dot_operations__pb2.ListOperationsRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.ListOperationsResponse.FromString,
+ )
+ self.GetOperation = channel.unary_unary(
+ "/google.longrunning.Operations/GetOperation",
+ request_serializer=google_dot_longrunning_dot_operations__pb2.GetOperationRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ )
+ self.DeleteOperation = channel.unary_unary(
+ "/google.longrunning.Operations/DeleteOperation",
+ request_serializer=google_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.SerializeToString,
+ response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ )
+ self.CancelOperation = channel.unary_unary(
+ "/google.longrunning.Operations/CancelOperation",
+ request_serializer=google_dot_longrunning_dot_operations__pb2.CancelOperationRequest.SerializeToString,
+ response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ )
+ self.WaitOperation = channel.unary_unary(
+ "/google.longrunning.Operations/WaitOperation",
+ request_serializer=google_dot_longrunning_dot_operations__pb2.WaitOperationRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ )
+
+
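+# Illustrative client-side polling (a sketch, not part of the generated
+# module; the target address and operation name are hypothetical):
+#
+#     channel = grpc.insecure_channel("localhost:50051")
+#     stub = OperationsStub(channel)
+#     request = google_dot_longrunning_dot_operations__pb2.GetOperationRequest(
+#         name="operations/sample"
+#     )
+#     op = stub.GetOperation(request)
+#     if op.done and op.HasField("response"):
+#         ...  # unpack op.response (a google.protobuf.Any)
+
+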
+class OperationsServicer(object):
+ """Manages long-running operations with an API service.
+
+    When an API method normally takes a long time to complete, it can be designed
+ to return [Operation][google.longrunning.Operation] to the client, and the client can use this
+ interface to receive the real response asynchronously by polling the
+ operation resource, or pass the operation resource to another API (such as
+ Google Cloud Pub/Sub API) to receive the response. Any API service that
+ returns long-running operations should implement the `Operations` interface
+ so developers can have a consistent client experience.
+ """
+
+ def ListOperations(self, request, context):
+ """Lists operations that match the specified filter in the request. If the
+ server doesn't support this method, it returns `UNIMPLEMENTED`.
+
+ NOTE: the `name` binding allows API services to override the binding
+ to use different resource name schemes, such as `users/*/operations`. To
+ override the binding, API services can add a binding such as
+ `"/v1/{name=users/*}/operations"` to their service configuration.
+ For backwards compatibility, the default name includes the operations
+    collection id; however, overriding users must ensure the name binding
+ is the parent resource, without the operations collection id.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def GetOperation(self, request, context):
+ """Gets the latest state of a long-running operation. Clients can use this
+ method to poll the operation result at intervals as recommended by the API
+ service.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def DeleteOperation(self, request, context):
+ """Deletes a long-running operation. This method indicates that the client is
+ no longer interested in the operation result. It does not cancel the
+ operation. If the server doesn't support this method, it returns
+ `google.rpc.Code.UNIMPLEMENTED`.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def CancelOperation(self, request, context):
+ """Starts asynchronous cancellation on a long-running operation. The server
+ makes a best effort to cancel the operation, but success is not
+ guaranteed. If the server doesn't support this method, it returns
+ `google.rpc.Code.UNIMPLEMENTED`. Clients can use
+ [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
+ other methods to check whether the cancellation succeeded or whether the
+ operation completed despite cancellation. On successful cancellation,
+ the operation is not deleted; instead, it becomes an operation with
+ an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
+ corresponding to `Code.CANCELLED`.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def WaitOperation(self, request, context):
+ """Waits until the specified long-running operation is done or reaches at most
+ a specified timeout, returning the latest state. If the operation is
+ already done, the latest state is immediately returned. If the timeout
+ specified is greater than the default HTTP/RPC timeout, the HTTP/RPC
+ timeout is used. If the server does not support this method, it returns
+ `google.rpc.Code.UNIMPLEMENTED`.
+ Note that this method is on a best-effort basis. It may return the latest
+ state before the specified timeout (including immediately), meaning even an
+ immediate response is no guarantee that the operation is done.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+
+def add_OperationsServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ "ListOperations": grpc.unary_unary_rpc_method_handler(
+ servicer.ListOperations,
+ request_deserializer=google_dot_longrunning_dot_operations__pb2.ListOperationsRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.ListOperationsResponse.SerializeToString,
+ ),
+ "GetOperation": grpc.unary_unary_rpc_method_handler(
+ servicer.GetOperation,
+ request_deserializer=google_dot_longrunning_dot_operations__pb2.GetOperationRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ "DeleteOperation": grpc.unary_unary_rpc_method_handler(
+ servicer.DeleteOperation,
+ request_deserializer=google_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.FromString,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ "CancelOperation": grpc.unary_unary_rpc_method_handler(
+ servicer.CancelOperation,
+ request_deserializer=google_dot_longrunning_dot_operations__pb2.CancelOperationRequest.FromString,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ "WaitOperation": grpc.unary_unary_rpc_method_handler(
+ servicer.WaitOperation,
+ request_deserializer=google_dot_longrunning_dot_operations__pb2.WaitOperationRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "google.longrunning.Operations", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
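+
+
+# Illustrative server wiring (a sketch, not part of the generated module;
+# MyOperations is a hypothetical OperationsServicer subclass):
+#
+#     from concurrent import futures
+#     server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
+#     add_OperationsServicer_to_server(MyOperations(), server)
+#     server.add_insecure_port("[::]:50051")
+#     server.start()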
+
+
+# This class is part of an EXPERIMENTAL API.
+class Operations(object):
+ """Manages long-running operations with an API service.
+
+    When an API method normally takes a long time to complete, it can be designed
+ to return [Operation][google.longrunning.Operation] to the client, and the client can use this
+ interface to receive the real response asynchronously by polling the
+ operation resource, or pass the operation resource to another API (such as
+ Google Cloud Pub/Sub API) to receive the response. Any API service that
+ returns long-running operations should implement the `Operations` interface
+ so developers can have a consistent client experience.
+ """
+
+ @staticmethod
+ def ListOperations(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.longrunning.Operations/ListOperations",
+ google_dot_longrunning_dot_operations__pb2.ListOperationsRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.ListOperationsResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def GetOperation(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.longrunning.Operations/GetOperation",
+ google_dot_longrunning_dot_operations__pb2.GetOperationRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DeleteOperation(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.longrunning.Operations/DeleteOperation",
+ google_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def CancelOperation(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.longrunning.Operations/CancelOperation",
+ google_dot_longrunning_dot_operations__pb2.CancelOperationRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def WaitOperation(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.longrunning.Operations/WaitOperation",
+ google_dot_longrunning_dot_operations__pb2.WaitOperationRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
diff --git a/Lib/site-packages/google/longrunning/operations_proto.py b/Lib/site-packages/google/longrunning/operations_proto.py
new file mode 100644
index 0000000..1da5cf7
--- /dev/null
+++ b/Lib/site-packages/google/longrunning/operations_proto.py
@@ -0,0 +1,5 @@
+# This module is provided for backwards compatibility with
+# googleapis-common-protos <= 1.5.0, where this was the import path.
+
+from __future__ import absolute_import
+from google.longrunning.operations_proto_pb2 import *
diff --git a/Lib/site-packages/google/longrunning/operations_proto_pb2.py b/Lib/site-packages/google/longrunning/operations_proto_pb2.py
new file mode 100644
index 0000000..c4459d0
--- /dev/null
+++ b/Lib/site-packages/google/longrunning/operations_proto_pb2.py
@@ -0,0 +1,196 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/longrunning/operations.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from google.api import client_pb2 as google_dot_api_dot_client__pb2
+from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
+from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n#google/longrunning/operations.proto\x12\x12google.longrunning\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x17google/rpc/status.proto\x1a google/protobuf/descriptor.proto"\xa8\x01\n\tOperation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x08metadata\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\x12#\n\x05\x65rror\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusH\x00\x12(\n\x08response\x18\x05 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x42\x08\n\x06result"#\n\x13GetOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x15ListOperationsRequest\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"d\n\x16ListOperationsResponse\x12\x31\n\noperations\x18\x01 \x03(\x0b\x32\x1d.google.longrunning.Operation\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"&\n\x16\x43\x61ncelOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"&\n\x16\x44\x65leteOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"P\n\x14WaitOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12*\n\x07timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"=\n\rOperationInfo\x12\x15\n\rresponse_type\x18\x01 \x01(\t\x12\x15\n\rmetadata_type\x18\x02 \x01(\t2\xaa\x05\n\nOperations\x12\x94\x01\n\x0eListOperations\x12).google.longrunning.ListOperationsRequest\x1a*.google.longrunning.ListOperationsResponse"+\x82\xd3\xe4\x93\x02\x17\x12\x15/v1/{name=operations}\xda\x41\x0bname,filter\x12\x7f\n\x0cGetOperation\x12\'.google.longrunning.GetOperationRequest\x1a\x1d.google.longrunning.Operation"\'\x82\xd3\xe4\x93\x02\x1a\x12\x18/v1/{name=operations/**}\xda\x41\x04name\x12~\n\x0f\x44\x65leteOperation\x12*.google.longrunning.DeleteOperationRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02\x1a*\x18/v1/{name=operations/**}\xda\x41\x04name\x12\x88\x01\n\x0f\x43\x61ncelOperation\x12*.google.longrunning.CancelOperationRequest\x1a\x16.google.protobuf.Empty"1\x82\xd3\xe4\x93\x02$"\x1f/v1/{name=operations/**}:cancel:\x01*\xda\x41\x04name\x12Z\n\rWaitOperation\x12(.google.longrunning.WaitOperationRequest\x1a\x1d.google.longrunning.Operation"\x00\x1a\x1d\xca\x41\x1alongrunning.googleapis.com:Z\n\x0eoperation_info\x12\x1e.google.protobuf.MethodOptions\x18\x99\x08 \x01(\x0b\x32!.google.longrunning.OperationInfoB\x9d\x01\n\x16\x63om.google.longrunningB\x0fOperationsProtoP\x01ZCcloud.google.com/go/longrunning/autogen/longrunningpb;longrunningpb\xf8\x01\x01\xaa\x02\x12Google.LongRunning\xca\x02\x12Google\\LongRunningb\x06proto3'
+)
+
+
+OPERATION_INFO_FIELD_NUMBER = 1049
+operation_info = DESCRIPTOR.extensions_by_name["operation_info"]
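+
+# Illustrative read of the extension (a sketch): for a MethodDescriptor ``m``
+# whose RPC is annotated with operation_info, the annotation could be read as
+#
+#     info = m.GetOptions().Extensions[operation_info]
+#     info.response_type, info.metadata_type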
+
+_OPERATION = DESCRIPTOR.message_types_by_name["Operation"]
+_GETOPERATIONREQUEST = DESCRIPTOR.message_types_by_name["GetOperationRequest"]
+_LISTOPERATIONSREQUEST = DESCRIPTOR.message_types_by_name["ListOperationsRequest"]
+_LISTOPERATIONSRESPONSE = DESCRIPTOR.message_types_by_name["ListOperationsResponse"]
+_CANCELOPERATIONREQUEST = DESCRIPTOR.message_types_by_name["CancelOperationRequest"]
+_DELETEOPERATIONREQUEST = DESCRIPTOR.message_types_by_name["DeleteOperationRequest"]
+_WAITOPERATIONREQUEST = DESCRIPTOR.message_types_by_name["WaitOperationRequest"]
+_OPERATIONINFO = DESCRIPTOR.message_types_by_name["OperationInfo"]
+Operation = _reflection.GeneratedProtocolMessageType(
+ "Operation",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _OPERATION,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.Operation)
+ },
+)
+_sym_db.RegisterMessage(Operation)
+
+GetOperationRequest = _reflection.GeneratedProtocolMessageType(
+ "GetOperationRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GETOPERATIONREQUEST,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.GetOperationRequest)
+ },
+)
+_sym_db.RegisterMessage(GetOperationRequest)
+
+ListOperationsRequest = _reflection.GeneratedProtocolMessageType(
+ "ListOperationsRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTOPERATIONSREQUEST,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.ListOperationsRequest)
+ },
+)
+_sym_db.RegisterMessage(ListOperationsRequest)
+
+ListOperationsResponse = _reflection.GeneratedProtocolMessageType(
+ "ListOperationsResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTOPERATIONSRESPONSE,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.ListOperationsResponse)
+ },
+)
+_sym_db.RegisterMessage(ListOperationsResponse)
+
+CancelOperationRequest = _reflection.GeneratedProtocolMessageType(
+ "CancelOperationRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _CANCELOPERATIONREQUEST,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.CancelOperationRequest)
+ },
+)
+_sym_db.RegisterMessage(CancelOperationRequest)
+
+DeleteOperationRequest = _reflection.GeneratedProtocolMessageType(
+ "DeleteOperationRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DELETEOPERATIONREQUEST,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.DeleteOperationRequest)
+ },
+)
+_sym_db.RegisterMessage(DeleteOperationRequest)
+
+WaitOperationRequest = _reflection.GeneratedProtocolMessageType(
+ "WaitOperationRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _WAITOPERATIONREQUEST,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.WaitOperationRequest)
+ },
+)
+_sym_db.RegisterMessage(WaitOperationRequest)
+
+OperationInfo = _reflection.GeneratedProtocolMessageType(
+ "OperationInfo",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _OPERATIONINFO,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.OperationInfo)
+ },
+)
+_sym_db.RegisterMessage(OperationInfo)
+
+_OPERATIONS = DESCRIPTOR.services_by_name["Operations"]
+if _descriptor._USE_C_DESCRIPTORS == False:
+ google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(
+ operation_info
+ )
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\026com.google.longrunningB\017OperationsProtoP\001ZCcloud.google.com/go/longrunning/autogen/longrunningpb;longrunningpb\370\001\001\252\002\022Google.LongRunning\312\002\022Google\\LongRunning"
+ _OPERATIONS._options = None
+ _OPERATIONS._serialized_options = b"\312A\032longrunning.googleapis.com"
+ _OPERATIONS.methods_by_name["ListOperations"]._options = None
+ _OPERATIONS.methods_by_name[
+ "ListOperations"
+ ]._serialized_options = (
+ b"\202\323\344\223\002\027\022\025/v1/{name=operations}\332A\013name,filter"
+ )
+ _OPERATIONS.methods_by_name["GetOperation"]._options = None
+ _OPERATIONS.methods_by_name[
+ "GetOperation"
+ ]._serialized_options = (
+ b"\202\323\344\223\002\032\022\030/v1/{name=operations/**}\332A\004name"
+ )
+ _OPERATIONS.methods_by_name["DeleteOperation"]._options = None
+ _OPERATIONS.methods_by_name[
+ "DeleteOperation"
+ ]._serialized_options = (
+ b"\202\323\344\223\002\032*\030/v1/{name=operations/**}\332A\004name"
+ )
+ _OPERATIONS.methods_by_name["CancelOperation"]._options = None
+ _OPERATIONS.methods_by_name[
+ "CancelOperation"
+ ]._serialized_options = (
+ b'\202\323\344\223\002$"\037/v1/{name=operations/**}:cancel:\001*\332A\004name'
+ )
+ _OPERATION._serialized_start = 262
+ _OPERATION._serialized_end = 430
+ _GETOPERATIONREQUEST._serialized_start = 432
+ _GETOPERATIONREQUEST._serialized_end = 467
+ _LISTOPERATIONSREQUEST._serialized_start = 469
+ _LISTOPERATIONSREQUEST._serialized_end = 561
+ _LISTOPERATIONSRESPONSE._serialized_start = 563
+ _LISTOPERATIONSRESPONSE._serialized_end = 663
+ _CANCELOPERATIONREQUEST._serialized_start = 665
+ _CANCELOPERATIONREQUEST._serialized_end = 703
+ _DELETEOPERATIONREQUEST._serialized_start = 705
+ _DELETEOPERATIONREQUEST._serialized_end = 743
+ _WAITOPERATIONREQUEST._serialized_start = 745
+ _WAITOPERATIONREQUEST._serialized_end = 825
+ _OPERATIONINFO._serialized_start = 827
+ _OPERATIONINFO._serialized_end = 888
+ _OPERATIONS._serialized_start = 891
+ _OPERATIONS._serialized_end = 1573
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/oauth2/__init__.py b/Lib/site-packages/google/oauth2/__init__.py
new file mode 100644
index 0000000..accae96
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/__init__.py
@@ -0,0 +1,36 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google OAuth 2.0 Library for Python."""
+
+import sys
+import warnings
+
+
+class Python37DeprecationWarning(DeprecationWarning): # pragma: NO COVER
+ """
+ Deprecation warning raised when Python 3.7 runtime is detected.
+ Python 3.7 support will be dropped after January 1, 2024.
+ """
+
+ pass
+
+
+# Checks if the current runtime is Python 3.7.
+if sys.version_info.major == 3 and sys.version_info.minor == 7: # pragma: NO COVER
+ message = (
+ "After January 1, 2024, new releases of this library will drop support "
+ "for Python 3.7."
+ )
+ warnings.warn(message, Python37DeprecationWarning)
diff --git a/Lib/site-packages/google/oauth2/_client.py b/Lib/site-packages/google/oauth2/_client.py
new file mode 100644
index 0000000..d2af6c8
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/_client.py
@@ -0,0 +1,507 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""OAuth 2.0 client.
+
+This is a client for interacting with an OAuth 2.0 authorization server's
+token endpoint.
+
+For more information about the token endpoint, see
+`Section 3.2 of rfc6749`_
+
+.. _Section 3.2 of rfc6749: https://tools.ietf.org/html/rfc6749#section-3.2
+"""
+
+import datetime
+import http.client as http_client
+import json
+import urllib
+
+from google.auth import _exponential_backoff
+from google.auth import _helpers
+from google.auth import exceptions
+from google.auth import jwt
+from google.auth import metrics
+from google.auth import transport
+
+_URLENCODED_CONTENT_TYPE = "application/x-www-form-urlencoded"
+_JSON_CONTENT_TYPE = "application/json"
+_JWT_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:jwt-bearer"
+_REFRESH_GRANT_TYPE = "refresh_token"
+_IAM_IDTOKEN_ENDPOINT = (
+ "https://iamcredentials.googleapis.com/v1/"
+ + "projects/-/serviceAccounts/{}:generateIdToken"
+)
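+# For example, formatting with a placeholder service account address such as
+# "sa@example.iam.gserviceaccount.com" yields
+# ".../projects/-/serviceAccounts/sa@example.iam.gserviceaccount.com:generateIdToken".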
+
+
+def _handle_error_response(response_data, retryable_error):
+ """Translates an error response into an exception.
+
+ Args:
+ response_data (Mapping | str): The decoded response data.
+        retryable_error (Optional[bool]): A boolean indicating if an error is retryable.
+ Defaults to False.
+
+ Raises:
+ google.auth.exceptions.RefreshError: The errors contained in response_data.
+ """
+
+ retryable_error = retryable_error if retryable_error else False
+
+ if isinstance(response_data, str):
+ raise exceptions.RefreshError(response_data, retryable=retryable_error)
+ try:
+ error_details = "{}: {}".format(
+ response_data["error"], response_data.get("error_description")
+ )
+ # If no details could be extracted, use the response data.
+ except (KeyError, ValueError):
+ error_details = json.dumps(response_data)
+
+ raise exceptions.RefreshError(
+ error_details, response_data, retryable=retryable_error
+ )
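+
+# For example (hypothetical payload), a response of
+# {"error": "invalid_grant", "error_description": "Bad Request"} raises
+# RefreshError("invalid_grant: Bad Request", ...).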
+
+
+def _can_retry(status_code, response_data):
+ """Checks if a request can be retried by inspecting the status code
+ and response body of the request.
+
+ Args:
+ status_code (int): The response status code.
+ response_data (Mapping | str): The decoded response data.
+
+ Returns:
+ bool: True if the response is retryable. False otherwise.
+ """
+ if status_code in transport.DEFAULT_RETRYABLE_STATUS_CODES:
+ return True
+
+ try:
+ # For a failed response, response_body could be a string
+ error_desc = response_data.get("error_description") or ""
+ error_code = response_data.get("error") or ""
+
+ if not isinstance(error_code, str) or not isinstance(error_desc, str):
+ return False
+
+ # Per Oauth 2.0 RFC https://www.rfc-editor.org/rfc/rfc6749.html#section-4.1.2.1
+ # This is needed because a redirect will not return a 500 status code.
+ retryable_error_descriptions = {
+ "internal_failure",
+ "server_error",
+ "temporarily_unavailable",
+ }
+
+ if any(e in retryable_error_descriptions for e in (error_code, error_desc)):
+ return True
+
+ except AttributeError:
+ pass
+
+ return False
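+
+# A few hedged examples of how _can_retry evaluates responses (the payloads
+# are hypothetical):
+#
+#     _can_retry(500, {})                          # True: retryable status code
+#     _can_retry(400, {"error": "server_error"})   # True: retryable error body
+#     _can_retry(400, {"error": "invalid_grant"})  # False
+#     _can_retry(400, "not json")                  # False: no mapping to inspect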
+
+
+def _parse_expiry(response_data):
+ """Parses the expiry field from a response into a datetime.
+
+ Args:
+ response_data (Mapping): The JSON-parsed response data.
+
+ Returns:
+ Optional[datetime]: The expiration or ``None`` if no expiration was
+ specified.
+ """
+ expires_in = response_data.get("expires_in", None)
+
+ if expires_in is not None:
+        # Some services do not respect the OAuth 2.0 RFC and send expires_in
+        # as a JSON string.
+ if isinstance(expires_in, str):
+ expires_in = int(expires_in)
+
+ return _helpers.utcnow() + datetime.timedelta(seconds=expires_in)
+ else:
+ return None
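+
+# For example (hypothetical payloads), {"expires_in": 3600} and
+# {"expires_in": "3600"} both yield utcnow() + 1 hour, while a response
+# without an expires_in field yields None.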
+
+
+def _token_endpoint_request_no_throw(
+ request,
+ token_uri,
+ body,
+ access_token=None,
+ use_json=False,
+ can_retry=True,
+ headers=None,
+ **kwargs
+):
+ """Makes a request to the OAuth 2.0 authorization server's token endpoint.
+ This function doesn't throw on response errors.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+        token_uri (str): The OAuth 2.0 authorization server's token endpoint
+ URI.
+ body (Mapping[str, str]): The parameters to send in the request body.
+ access_token (Optional(str)): The access token needed to make the request.
+ use_json (Optional(bool)): Use urlencoded format or json format for the
+ content type. The default value is False.
+ can_retry (bool): Enable or disable request retry behavior.
+ headers (Optional[Mapping[str, str]]): The headers for the request.
+ kwargs: Additional arguments passed on to the request method. The
+ kwargs will be passed to `requests.request` method, see:
+ https://docs.python-requests.org/en/latest/api/#requests.request.
+ For example, you can use `cert=("cert_pem_path", "key_pem_path")`
+ to set up client side SSL certificate, and use
+        `verify="ca_bundle_path"` to set up the CA certificates for server
+ side SSL certificate verification.
+
+ Returns:
+ Tuple(bool, Mapping[str, str], Optional[bool]): A boolean indicating
+ if the request is successful, a mapping for the JSON-decoded response
+ data and in the case of an error a boolean indicating if the error
+ is retryable.
+ """
+ if use_json:
+ headers_to_use = {"Content-Type": _JSON_CONTENT_TYPE}
+ body = json.dumps(body).encode("utf-8")
+ else:
+ headers_to_use = {"Content-Type": _URLENCODED_CONTENT_TYPE}
+ body = urllib.parse.urlencode(body).encode("utf-8")
+
+ if access_token:
+ headers_to_use["Authorization"] = "Bearer {}".format(access_token)
+
+ if headers:
+ headers_to_use.update(headers)
+
+ def _perform_request():
+ response = request(
+ method="POST", url=token_uri, headers=headers_to_use, body=body, **kwargs
+ )
+ response_body = (
+ response.data.decode("utf-8")
+ if hasattr(response.data, "decode")
+ else response.data
+ )
+ response_data = ""
+ try:
+ # response_body should be a JSON
+ response_data = json.loads(response_body)
+ except ValueError:
+ response_data = response_body
+
+ if response.status == http_client.OK:
+ return True, response_data, None
+
+ retryable_error = _can_retry(
+ status_code=response.status, response_data=response_data
+ )
+
+ return False, response_data, retryable_error
+
+ request_succeeded, response_data, retryable_error = _perform_request()
+
+ if request_succeeded or not retryable_error or not can_retry:
+ return request_succeeded, response_data, retryable_error
+
+ retries = _exponential_backoff.ExponentialBackoff()
+ for _ in retries:
+ request_succeeded, response_data, retryable_error = _perform_request()
+ if request_succeeded or not retryable_error:
+ return request_succeeded, response_data, retryable_error
+
+ return False, response_data, retryable_error
+
+
+def _token_endpoint_request(
+ request,
+ token_uri,
+ body,
+ access_token=None,
+ use_json=False,
+ can_retry=True,
+ headers=None,
+ **kwargs
+):
+ """Makes a request to the OAuth 2.0 authorization server's token endpoint.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+        token_uri (str): The OAuth 2.0 authorization server's token endpoint
+ URI.
+ body (Mapping[str, str]): The parameters to send in the request body.
+ access_token (Optional(str)): The access token needed to make the request.
+ use_json (Optional(bool)): Use urlencoded format or json format for the
+ content type. The default value is False.
+ can_retry (bool): Enable or disable request retry behavior.
+ headers (Optional[Mapping[str, str]]): The headers for the request.
+ kwargs: Additional arguments passed on to the request method. The
+ kwargs will be passed to `requests.request` method, see:
+ https://docs.python-requests.org/en/latest/api/#requests.request.
+ For example, you can use `cert=("cert_pem_path", "key_pem_path")`
+ to set up client side SSL certificate, and use
+        `verify="ca_bundle_path"` to set up the CA certificates for server
+ side SSL certificate verification.
+
+ Returns:
+ Mapping[str, str]: The JSON-decoded response data.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+ """
+
+ response_status_ok, response_data, retryable_error = _token_endpoint_request_no_throw(
+ request,
+ token_uri,
+ body,
+ access_token=access_token,
+ use_json=use_json,
+ can_retry=can_retry,
+ headers=headers,
+ **kwargs
+ )
+ if not response_status_ok:
+ _handle_error_response(response_data, retryable_error)
+ return response_data
+
+
+def jwt_grant(request, token_uri, assertion, can_retry=True):
+ """Implements the JWT Profile for OAuth 2.0 Authorization Grants.
+
+ For more details, see `rfc7523 section 4`_.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+        token_uri (str): The OAuth 2.0 authorization server's token endpoint
+ URI.
+ assertion (str): The OAuth 2.0 assertion.
+ can_retry (bool): Enable or disable request retry behavior.
+
+ Returns:
+ Tuple[str, Optional[datetime], Mapping[str, str]]: The access token,
+ expiration, and additional data returned by the token endpoint.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+
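+    Example (a minimal sketch; ``assertion`` is assumed to be a signed JWT
+    produced elsewhere, e.g. via ``google.auth.jwt`` with a service account
+    signer)::
+
+        import google.auth.transport.requests
+
+        request = google.auth.transport.requests.Request()
+        access_token, expiry, extra = jwt_grant(
+            request, "https://oauth2.googleapis.com/token", assertion)
+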
+ .. _rfc7523 section 4: https://tools.ietf.org/html/rfc7523#section-4
+ """
+ body = {"assertion": assertion, "grant_type": _JWT_GRANT_TYPE}
+
+ response_data = _token_endpoint_request(
+ request,
+ token_uri,
+ body,
+ can_retry=can_retry,
+ headers={
+ metrics.API_CLIENT_HEADER: metrics.token_request_access_token_sa_assertion()
+ },
+ )
+
+ try:
+ access_token = response_data["access_token"]
+ except KeyError as caught_exc:
+ new_exc = exceptions.RefreshError(
+ "No access token in response.", response_data, retryable=False
+ )
+ raise new_exc from caught_exc
+
+ expiry = _parse_expiry(response_data)
+
+ return access_token, expiry, response_data
+
+
+def call_iam_generate_id_token_endpoint(request, signer_email, audience, access_token):
+ """Call iam.generateIdToken endpoint to get ID token.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ signer_email (str): The signer email used to form the IAM
+ generateIdToken endpoint.
+ audience (str): The audience for the ID token.
+ access_token (str): The access token used to call the IAM endpoint.
+
+ Returns:
+ Tuple[str, datetime]: The ID token and expiration.
+ """
+ body = {"audience": audience, "includeEmail": "true", "useEmailAzp": "true"}
+
+ response_data = _token_endpoint_request(
+ request,
+ _IAM_IDTOKEN_ENDPOINT.format(signer_email),
+ body,
+ access_token=access_token,
+ use_json=True,
+ )
+
+ try:
+ id_token = response_data["token"]
+ except KeyError as caught_exc:
+ new_exc = exceptions.RefreshError(
+ "No ID token in response.", response_data, retryable=False
+ )
+ raise new_exc from caught_exc
+
+ payload = jwt.decode(id_token, verify=False)
+ expiry = datetime.datetime.utcfromtimestamp(payload["exp"])
+
+ return id_token, expiry
+
+
+def id_token_jwt_grant(request, token_uri, assertion, can_retry=True):
+ """Implements the JWT Profile for OAuth 2.0 Authorization Grants, but
+ requests an OpenID Connect ID Token instead of an access token.
+
+ This is a variant on the standard JWT Profile that is currently unique
+ to Google. This was added for the benefit of authenticating to services
+ that require ID Tokens instead of access tokens or JWT bearer tokens.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ token_uri (str): The OAuth 2.0 authorization server's token endpoint
+ URI.
+ assertion (str): JWT token signed by a service account. The token's
+ payload must include a ``target_audience`` claim.
+ can_retry (bool): Enable or disable request retry behavior.
+
+ Returns:
+ Tuple[str, Optional[datetime], Mapping[str, str]]:
+ The (encoded) Open ID Connect ID Token, expiration, and additional
+ data returned by the endpoint.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+ """
+ body = {"assertion": assertion, "grant_type": _JWT_GRANT_TYPE}
+
+ response_data = _token_endpoint_request(
+ request,
+ token_uri,
+ body,
+ can_retry=can_retry,
+ headers={
+ metrics.API_CLIENT_HEADER: metrics.token_request_id_token_sa_assertion()
+ },
+ )
+
+ try:
+ id_token = response_data["id_token"]
+ except KeyError as caught_exc:
+ new_exc = exceptions.RefreshError(
+ "No ID token in response.", response_data, retryable=False
+ )
+ raise new_exc from caught_exc
+
+ payload = jwt.decode(id_token, verify=False)
+ expiry = datetime.datetime.utcfromtimestamp(payload["exp"])
+
+ return id_token, expiry, response_data
+
+
+def _handle_refresh_grant_response(response_data, refresh_token):
+ """Extract tokens from refresh grant response.
+
+ Args:
+ response_data (Mapping[str, str]): Refresh grant response data.
+ refresh_token (str): Current refresh token.
+
+ Returns:
+ Tuple[str, str, Optional[datetime], Mapping[str, str]]: The access token,
+ refresh token, expiration, and additional data returned by the token
+        endpoint. If response_data doesn't include a refresh token, the
+        current refresh token is returned.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+ """
+ try:
+ access_token = response_data["access_token"]
+ except KeyError as caught_exc:
+ new_exc = exceptions.RefreshError(
+ "No access token in response.", response_data, retryable=False
+ )
+ raise new_exc from caught_exc
+
+ refresh_token = response_data.get("refresh_token", refresh_token)
+ expiry = _parse_expiry(response_data)
+
+ return access_token, refresh_token, expiry, response_data
+
+
+def refresh_grant(
+ request,
+ token_uri,
+ refresh_token,
+ client_id,
+ client_secret,
+ scopes=None,
+ rapt_token=None,
+ can_retry=True,
+):
+ """Implements the OAuth 2.0 refresh token grant.
+
+    For more details, see `rfc6749 section 6`_.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+        token_uri (str): The OAuth 2.0 authorization server's token endpoint
+ URI.
+ refresh_token (str): The refresh token to use to get a new access
+ token.
+ client_id (str): The OAuth 2.0 application's client ID.
+        client_secret (str): The OAuth 2.0 application's client secret.
+ scopes (Optional(Sequence[str])): Scopes to request. If present, all
+ scopes must be authorized for the refresh token. Useful if refresh
+ token has a wild card scope (e.g.
+ 'https://www.googleapis.com/auth/any-api').
+ rapt_token (Optional(str)): The reauth Proof Token.
+ can_retry (bool): Enable or disable request retry behavior.
+
+ Returns:
+ Tuple[str, str, Optional[datetime], Mapping[str, str]]: The access
+ token, new or current refresh token, expiration, and additional data
+ returned by the token endpoint.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+
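+    Example (a minimal sketch with placeholder credential values)::
+
+        import google.auth.transport.requests
+
+        request = google.auth.transport.requests.Request()
+        token, new_refresh_token, expiry, extra = refresh_grant(
+            request,
+            "https://oauth2.googleapis.com/token",
+            refresh_token="REFRESH_TOKEN",      # placeholder
+            client_id="CLIENT_ID",              # placeholder
+            client_secret="CLIENT_SECRET",      # placeholder
+        )
+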
+    .. _rfc6749 section 6: https://tools.ietf.org/html/rfc6749#section-6
+ """
+ body = {
+ "grant_type": _REFRESH_GRANT_TYPE,
+ "client_id": client_id,
+ "client_secret": client_secret,
+ "refresh_token": refresh_token,
+ }
+ if scopes:
+ body["scope"] = " ".join(scopes)
+ if rapt_token:
+ body["rapt"] = rapt_token
+
+ response_data = _token_endpoint_request(
+ request, token_uri, body, can_retry=can_retry
+ )
+ return _handle_refresh_grant_response(response_data, refresh_token)
diff --git a/Lib/site-packages/google/oauth2/_client_async.py b/Lib/site-packages/google/oauth2/_client_async.py
new file mode 100644
index 0000000..2858d86
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/_client_async.py
@@ -0,0 +1,292 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""OAuth 2.0 async client.
+
+This is a client for interacting with an OAuth 2.0 authorization server's
+token endpoint.
+
+For more information about the token endpoint, see
+`Section 3.2 of rfc6749`_
+
+.. _Section 3.2 of rfc6749: https://tools.ietf.org/html/rfc6749#section-3.2
+"""
+
+import datetime
+import http.client as http_client
+import json
+import urllib
+
+from google.auth import _exponential_backoff
+from google.auth import exceptions
+from google.auth import jwt
+from google.oauth2 import _client as client
+
+
+async def _token_endpoint_request_no_throw(
+ request, token_uri, body, access_token=None, use_json=False, can_retry=True
+):
+ """Makes a request to the OAuth 2.0 authorization server's token endpoint.
+ This function doesn't throw on response errors.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+        token_uri (str): The OAuth 2.0 authorization server's token endpoint
+ URI.
+ body (Mapping[str, str]): The parameters to send in the request body.
+ access_token (Optional(str)): The access token needed to make the request.
+ use_json (Optional(bool)): Use urlencoded format or json format for the
+ content type. The default value is False.
+ can_retry (bool): Enable or disable request retry behavior.
+
+ Returns:
+ Tuple(bool, Mapping[str, str], Optional[bool]): A boolean indicating
+ if the request is successful, a mapping for the JSON-decoded response
+ data and in the case of an error a boolean indicating if the error
+ is retryable.
+ """
+ if use_json:
+ headers = {"Content-Type": client._JSON_CONTENT_TYPE}
+ body = json.dumps(body).encode("utf-8")
+ else:
+ headers = {"Content-Type": client._URLENCODED_CONTENT_TYPE}
+ body = urllib.parse.urlencode(body).encode("utf-8")
+
+ if access_token:
+ headers["Authorization"] = "Bearer {}".format(access_token)
+
+ async def _perform_request():
+ response = await request(
+ method="POST", url=token_uri, headers=headers, body=body
+ )
+
+        # Using data.read() resulted in zlib decompression errors. This may
+        # require future investigation.
+ response_body1 = await response.content()
+
+ response_body = (
+ response_body1.decode("utf-8")
+ if hasattr(response_body1, "decode")
+ else response_body1
+ )
+
+ try:
+ response_data = json.loads(response_body)
+ except ValueError:
+ response_data = response_body
+
+ if response.status == http_client.OK:
+ return True, response_data, None
+
+ retryable_error = client._can_retry(
+ status_code=response.status, response_data=response_data
+ )
+
+ return False, response_data, retryable_error
+
+ request_succeeded, response_data, retryable_error = await _perform_request()
+
+ if request_succeeded or not retryable_error or not can_retry:
+ return request_succeeded, response_data, retryable_error
+
+ retries = _exponential_backoff.ExponentialBackoff()
+ for _ in retries:
+ request_succeeded, response_data, retryable_error = await _perform_request()
+ if request_succeeded or not retryable_error:
+ return request_succeeded, response_data, retryable_error
+
+ return False, response_data, retryable_error
+
+
+async def _token_endpoint_request(
+ request, token_uri, body, access_token=None, use_json=False, can_retry=True
+):
+ """Makes a request to the OAuth 2.0 authorization server's token endpoint.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+        token_uri (str): The OAuth 2.0 authorization server's token endpoint
+ URI.
+ body (Mapping[str, str]): The parameters to send in the request body.
+ access_token (Optional(str)): The access token needed to make the request.
+ use_json (Optional(bool)): Use urlencoded format or json format for the
+ content type. The default value is False.
+ can_retry (bool): Enable or disable request retry behavior.
+
+ Returns:
+ Mapping[str, str]: The JSON-decoded response data.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+ """
+
+ response_status_ok, response_data, retryable_error = await _token_endpoint_request_no_throw(
+ request,
+ token_uri,
+ body,
+ access_token=access_token,
+ use_json=use_json,
+ can_retry=can_retry,
+ )
+ if not response_status_ok:
+ client._handle_error_response(response_data, retryable_error)
+ return response_data
+
+
+async def jwt_grant(request, token_uri, assertion, can_retry=True):
+ """Implements the JWT Profile for OAuth 2.0 Authorization Grants.
+
+ For more details, see `rfc7523 section 4`_.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+        token_uri (str): The OAuth 2.0 authorization server's token endpoint
+ URI.
+ assertion (str): The OAuth 2.0 assertion.
+ can_retry (bool): Enable or disable request retry behavior.
+
+ Returns:
+ Tuple[str, Optional[datetime], Mapping[str, str]]: The access token,
+ expiration, and additional data returned by the token endpoint.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+
+ .. _rfc7523 section 4: https://tools.ietf.org/html/rfc7523#section-4
+ """
+ body = {"assertion": assertion, "grant_type": client._JWT_GRANT_TYPE}
+
+ response_data = await _token_endpoint_request(
+ request, token_uri, body, can_retry=can_retry
+ )
+
+ try:
+ access_token = response_data["access_token"]
+ except KeyError as caught_exc:
+ new_exc = exceptions.RefreshError(
+ "No access token in response.", response_data, retryable=False
+ )
+ raise new_exc from caught_exc
+
+ expiry = client._parse_expiry(response_data)
+
+ return access_token, expiry, response_data
+
+
+async def id_token_jwt_grant(request, token_uri, assertion, can_retry=True):
+ """Implements the JWT Profile for OAuth 2.0 Authorization Grants, but
+ requests an OpenID Connect ID Token instead of an access token.
+
+ This is a variant on the standard JWT Profile that is currently unique
+ to Google. This was added for the benefit of authenticating to services
+ that require ID Tokens instead of access tokens or JWT bearer tokens.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ token_uri (str): The OAuth 2.0 authorization server's token endpoint
+ URI.
+ assertion (str): JWT token signed by a service account. The token's
+ payload must include a ``target_audience`` claim.
+ can_retry (bool): Enable or disable request retry behavior.
+
+ Returns:
+ Tuple[str, Optional[datetime], Mapping[str, str]]:
+ The (encoded) Open ID Connect ID Token, expiration, and additional
+ data returned by the endpoint.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+ """
+ body = {"assertion": assertion, "grant_type": client._JWT_GRANT_TYPE}
+
+ response_data = await _token_endpoint_request(
+ request, token_uri, body, can_retry=can_retry
+ )
+
+ try:
+ id_token = response_data["id_token"]
+ except KeyError as caught_exc:
+ new_exc = exceptions.RefreshError(
+ "No ID token in response.", response_data, retryable=False
+ )
+ raise new_exc from caught_exc
+
+ payload = jwt.decode(id_token, verify=False)
+ expiry = datetime.datetime.utcfromtimestamp(payload["exp"])
+
+ return id_token, expiry, response_data
+
+
+async def refresh_grant(
+ request,
+ token_uri,
+ refresh_token,
+ client_id,
+ client_secret,
+ scopes=None,
+ rapt_token=None,
+ can_retry=True,
+):
+ """Implements the OAuth 2.0 refresh token grant.
+
+    For more details, see `rfc6749 section 6`_.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+        token_uri (str): The OAuth 2.0 authorization server's token endpoint
+ URI.
+ refresh_token (str): The refresh token to use to get a new access
+ token.
+ client_id (str): The OAuth 2.0 application's client ID.
+        client_secret (str): The OAuth 2.0 application's client secret.
+ scopes (Optional(Sequence[str])): Scopes to request. If present, all
+ scopes must be authorized for the refresh token. Useful if refresh
+ token has a wild card scope (e.g.
+ 'https://www.googleapis.com/auth/any-api').
+ rapt_token (Optional(str)): The reauth Proof Token.
+ can_retry (bool): Enable or disable request retry behavior.
+
+ Returns:
+ Tuple[str, Optional[str], Optional[datetime], Mapping[str, str]]: The
+ access token, new or current refresh token, expiration, and additional data
+ returned by the token endpoint.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+
+    .. _rfc6749 section 6: https://tools.ietf.org/html/rfc6749#section-6
+ """
+ body = {
+ "grant_type": client._REFRESH_GRANT_TYPE,
+ "client_id": client_id,
+ "client_secret": client_secret,
+ "refresh_token": refresh_token,
+ }
+ if scopes:
+ body["scope"] = " ".join(scopes)
+ if rapt_token:
+ body["rapt"] = rapt_token
+
+ response_data = await _token_endpoint_request(
+ request, token_uri, body, can_retry=can_retry
+ )
+ return client._handle_refresh_grant_response(response_data, refresh_token)
diff --git a/Lib/site-packages/google/oauth2/_credentials_async.py b/Lib/site-packages/google/oauth2/_credentials_async.py
new file mode 100644
index 0000000..b5561aa
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/_credentials_async.py
@@ -0,0 +1,118 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""OAuth 2.0 Async Credentials.
+
+This module provides credentials based on OAuth 2.0 access and refresh tokens.
+These credentials usually access resources on behalf of a user (resource
+owner).
+
+Specifically, this is intended to use access tokens acquired using the
+`Authorization Code grant`_ and can refresh those tokens using an
+optional `refresh token`_.
+
+Obtaining the initial access and refresh token is outside of the scope of this
+module. Consult `rfc6749 section 4.1`_ for complete details on the
+Authorization Code grant flow.
+
+.. _Authorization Code grant: https://tools.ietf.org/html/rfc6749#section-1.3.1
+.. _refresh token: https://tools.ietf.org/html/rfc6749#section-6
+.. _rfc6749 section 4.1: https://tools.ietf.org/html/rfc6749#section-4.1
+"""
+
+from google.auth import _credentials_async as credentials
+from google.auth import _helpers
+from google.auth import exceptions
+from google.oauth2 import _reauth_async as reauth
+from google.oauth2 import credentials as oauth2_credentials
+
+
+class Credentials(oauth2_credentials.Credentials):
+ """Credentials using OAuth 2.0 access and refresh tokens.
+
+ The credentials are considered immutable. If you want to modify the
+ quota project, use :meth:`with_quota_project` or ::
+
+        credentials = credentials.with_quota_project('myproject-123')
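+
+    A minimal refresh sketch (the transport import mirrors the aiohttp-based
+    examples elsewhere in this package and may vary by version)::
+
+        from google.auth.transport import aiohttp_requests
+
+        request = aiohttp_requests.Request()
+        await credentials.refresh(request)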
+ """
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ async def refresh(self, request):
+ if (
+ self._refresh_token is None
+ or self._token_uri is None
+ or self._client_id is None
+ or self._client_secret is None
+ ):
+ raise exceptions.RefreshError(
+ "The credentials do not contain the necessary fields need to "
+ "refresh the access token. You must specify refresh_token, "
+ "token_uri, client_id, and client_secret."
+ )
+
+ (
+ access_token,
+ refresh_token,
+ expiry,
+ grant_response,
+ rapt_token,
+ ) = await reauth.refresh_grant(
+ request,
+ self._token_uri,
+ self._refresh_token,
+ self._client_id,
+ self._client_secret,
+ scopes=self._scopes,
+ rapt_token=self._rapt_token,
+ enable_reauth_refresh=self._enable_reauth_refresh,
+ )
+
+ self.token = access_token
+ self.expiry = expiry
+ self._refresh_token = refresh_token
+ self._id_token = grant_response.get("id_token")
+ self._rapt_token = rapt_token
+
+ if self._scopes and "scope" in grant_response:
+ requested_scopes = frozenset(self._scopes)
+ granted_scopes = frozenset(grant_response["scope"].split())
+ scopes_requested_but_not_granted = requested_scopes - granted_scopes
+ if scopes_requested_but_not_granted:
+ raise exceptions.RefreshError(
+ "Not all requested scopes were granted by the "
+ "authorization server, missing scopes {}.".format(
+ ", ".join(scopes_requested_but_not_granted)
+ )
+ )
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ async def before_request(self, request, method, url, headers):
+ if not self.valid:
+ await self.refresh(request)
+ self.apply(headers)
+
+
+class UserAccessTokenCredentials(oauth2_credentials.UserAccessTokenCredentials):
+ """Access token credentials for user account.
+
+ Obtain the access token for a given user account or the current active
+ user account with the ``gcloud auth print-access-token`` command.
+
+ Args:
+ account (Optional[str]): Account to get the access token for. If not
+ specified, the current active account will be used.
+ quota_project_id (Optional[str]): The project ID used for quota
+ and billing.
+
+ """
diff --git a/Lib/site-packages/google/oauth2/_id_token_async.py b/Lib/site-packages/google/oauth2/_id_token_async.py
new file mode 100644
index 0000000..6594e41
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/_id_token_async.py
@@ -0,0 +1,285 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google ID Token helpers.
+
+Provides support for verifying `OpenID Connect ID Tokens`_, especially ones
+generated by Google infrastructure.
+
+To parse and verify an ID Token issued by Google's OAuth 2.0 authorization
+server use :func:`verify_oauth2_token`. To verify an ID Token issued by
+Firebase, use :func:`verify_firebase_token`.
+
+A general purpose ID Token verifier is available as :func:`verify_token`.
+
+Example::
+
+ from google.oauth2 import _id_token_async
+ from google.auth.transport import aiohttp_requests
+
+ request = aiohttp_requests.Request()
+
+ id_info = await _id_token_async.verify_oauth2_token(
+ token, request, 'my-client-id.example.com')
+
+ if id_info['iss'] != 'https://accounts.google.com':
+ raise ValueError('Wrong issuer.')
+
+ userid = id_info['sub']
+
+By default, this will re-fetch certificates for each verification. Because
+Google's public keys are only changed infrequently (on the order of once per
+day), you may wish to take advantage of caching to reduce latency and the
+potential for network errors. This can be accomplished using an external
+library like `CacheControl`_ to create a cache-aware
+:class:`google.auth.transport.Request`::
+
+ import cachecontrol
+ import google.auth.transport.requests
+ import requests
+
+ session = requests.session()
+ cached_session = cachecontrol.CacheControl(session)
+ request = google.auth.transport.requests.Request(session=cached_session)
+
+.. _OpenID Connect ID Tokens:
+ http://openid.net/specs/openid-connect-core-1_0.html#IDToken
+.. _CacheControl: https://cachecontrol.readthedocs.io
+"""
+
+import http.client as http_client
+import json
+import os
+
+from google.auth import environment_vars
+from google.auth import exceptions
+from google.auth import jwt
+from google.auth.transport import requests
+from google.oauth2 import id_token as sync_id_token
+
+
+async def _fetch_certs(request, certs_url):
+ """Fetches certificates.
+
+    Google-style certificate endpoints return JSON in the format of
+ ``{'key id': 'x509 certificate'}``.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests. This must be an aiohttp request.
+ certs_url (str): The certificate endpoint URL.
+
+ Returns:
+ Mapping[str, str]: A mapping of public key ID to x.509 certificate
+ data.
+ """
+ response = await request(certs_url, method="GET")
+
+ if response.status != http_client.OK:
+ raise exceptions.TransportError(
+ "Could not fetch certificates at {}".format(certs_url)
+ )
+
+ data = await response.content()
+
+ return json.loads(data)
+
+
+async def verify_token(
+ id_token,
+ request,
+ audience=None,
+ certs_url=sync_id_token._GOOGLE_OAUTH2_CERTS_URL,
+ clock_skew_in_seconds=0,
+):
+ """Verifies an ID token and returns the decoded token.
+
+ Args:
+ id_token (Union[str, bytes]): The encoded token.
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests. This must be an aiohttp request.
+ audience (str): The audience that this token is intended for. If None
+ then the audience is not verified.
+ certs_url (str): The URL that specifies the certificates to use to
+ verify the token. This URL should return JSON in the format of
+ ``{'key id': 'x509 certificate'}``.
+ clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
+ validation.
+
+ Returns:
+ Mapping[str, Any]: The decoded token.
+ """
+ certs = await _fetch_certs(request, certs_url)
+
+ return jwt.decode(
+ id_token,
+ certs=certs,
+ audience=audience,
+ clock_skew_in_seconds=clock_skew_in_seconds,
+ )
+
+
+async def verify_oauth2_token(
+ id_token, request, audience=None, clock_skew_in_seconds=0
+):
+ """Verifies an ID Token issued by Google's OAuth 2.0 authorization server.
+
+ Args:
+ id_token (Union[str, bytes]): The encoded token.
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests. This must be an aiohttp request.
+ audience (str): The audience that this token is intended for. This is
+ typically your application's OAuth 2.0 client ID. If None then the
+ audience is not verified.
+ clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
+ validation.
+
+ Returns:
+ Mapping[str, Any]: The decoded token.
+
+ Raises:
+ exceptions.GoogleAuthError: If the issuer is invalid.
+ """
+ idinfo = await verify_token(
+ id_token,
+ request,
+ audience=audience,
+ certs_url=sync_id_token._GOOGLE_OAUTH2_CERTS_URL,
+ clock_skew_in_seconds=clock_skew_in_seconds,
+ )
+
+ if idinfo["iss"] not in sync_id_token._GOOGLE_ISSUERS:
+ raise exceptions.GoogleAuthError(
+ "Wrong issuer. 'iss' should be one of the following: {}".format(
+ sync_id_token._GOOGLE_ISSUERS
+ )
+ )
+
+ return idinfo
+
+
+async def verify_firebase_token(
+ id_token, request, audience=None, clock_skew_in_seconds=0
+):
+ """Verifies an ID Token issued by Firebase Authentication.
+
+ Args:
+ id_token (Union[str, bytes]): The encoded token.
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests. This must be an aiohttp request.
+ audience (str): The audience that this token is intended for. This is
+ typically your Firebase application ID. If None then the audience
+ is not verified.
+ clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
+ validation.
+
+ Returns:
+ Mapping[str, Any]: The decoded token.
+ """
+ return await verify_token(
+ id_token,
+ request,
+ audience=audience,
+ certs_url=sync_id_token._GOOGLE_APIS_CERTS_URL,
+ clock_skew_in_seconds=clock_skew_in_seconds,
+ )
+
+
+async def fetch_id_token(request, audience):
+ """Fetch the ID Token from the current environment.
+
+    This function acquires an ID token from the environment in the following order.
+ See https://google.aip.dev/auth/4110.
+
+    1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
+       to the path of a valid service account JSON file, then the ID token is
+       acquired using those service account credentials.
+    2. If the application is running in Compute Engine, App Engine or Cloud Run,
+       then the ID token is obtained from the metadata server.
+    3. If the metadata server doesn't exist and no valid service account
+       credentials are found,
+       :class:`~google.auth.exceptions.DefaultCredentialsError` will be raised.
+
+ Example::
+
+ import google.oauth2._id_token_async
+ import google.auth.transport.aiohttp_requests
+
+ request = google.auth.transport.aiohttp_requests.Request()
+ target_audience = "https://pubsub.googleapis.com"
+
+ id_token = await google.oauth2._id_token_async.fetch_id_token(request, target_audience)
+
+ Args:
+ request (google.auth.transport.aiohttp_requests.Request): A callable used to make
+ HTTP requests.
+ audience (str): The audience that this ID token is intended for.
+
+ Returns:
+ str: The ID token.
+
+ Raises:
+ ~google.auth.exceptions.DefaultCredentialsError:
+ If metadata server doesn't exist and no valid service account
+ credentials are found.
+ """
+ # 1. Try to get credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
+ # variable.
+ credentials_filename = os.environ.get(environment_vars.CREDENTIALS)
+ if credentials_filename:
+ if not (
+ os.path.exists(credentials_filename)
+ and os.path.isfile(credentials_filename)
+ ):
+ raise exceptions.DefaultCredentialsError(
+ "GOOGLE_APPLICATION_CREDENTIALS path is either not found or invalid."
+ )
+
+ try:
+ with open(credentials_filename, "r") as f:
+ from google.oauth2 import _service_account_async as service_account
+
+ info = json.load(f)
+ if info.get("type") == "service_account":
+ credentials = service_account.IDTokenCredentials.from_service_account_info(
+ info, target_audience=audience
+ )
+ await credentials.refresh(request)
+ return credentials.token
+ except ValueError as caught_exc:
+ new_exc = exceptions.DefaultCredentialsError(
+ "GOOGLE_APPLICATION_CREDENTIALS is not valid service account credentials.",
+ caught_exc,
+ )
+ raise new_exc from caught_exc
+
+    # 2. Try to fetch ID token from the metadata server if it exists. The code works
+ # for GAE and Cloud Run metadata server as well.
+ try:
+ from google.auth import compute_engine
+ from google.auth.compute_engine import _metadata
+
+ request_new = requests.Request()
+ if _metadata.ping(request_new):
+ credentials = compute_engine.IDTokenCredentials(
+ request_new, audience, use_metadata_identity_endpoint=True
+ )
+ credentials.refresh(request_new)
+ return credentials.token
+ except (ImportError, exceptions.TransportError):
+ pass
+
+ raise exceptions.DefaultCredentialsError(
+ "Neither metadata server or valid service account credentials are found."
+ )
diff --git a/Lib/site-packages/google/oauth2/_reauth_async.py b/Lib/site-packages/google/oauth2/_reauth_async.py
new file mode 100644
index 0000000..de3675c
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/_reauth_async.py
@@ -0,0 +1,328 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A module that provides functions for handling rapt authentication.
+
+Reauth is a process of obtaining additional authentication (such as password,
+security token, etc.) while refreshing OAuth 2.0 credentials for a user.
+
+Credentials that use the Reauth flow must have the reauth scope,
+``https://www.googleapis.com/auth/accounts.reauth``.
+
+This module provides a high-level function for executing the Reauth process,
+:func:`refresh_grant`, and lower-level helpers for doing the individual
+steps of the reauth process.
+
+Those steps are:
+
+1. Obtaining a list of challenges from the reauth server.
+2. Running through each challenge and sending the result back to the reauth
+ server.
+3. Refreshing the access token using the returned rapt token.
+"""
+
+import sys
+
+from google.auth import exceptions
+from google.oauth2 import _client
+from google.oauth2 import _client_async
+from google.oauth2 import challenges
+from google.oauth2 import reauth
+
+
+async def _get_challenges(
+ request, supported_challenge_types, access_token, requested_scopes=None
+):
+ """Does initial request to reauth API to get the challenges.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests. This must be an aiohttp request.
+ supported_challenge_types (Sequence[str]): list of challenge names
+ supported by the manager.
+ access_token (str): Access token with reauth scopes.
+ requested_scopes (Optional(Sequence[str])): Authorized scopes for the credentials.
+
+ Returns:
+ dict: The response from the reauth API.
+ """
+ body = {"supportedChallengeTypes": supported_challenge_types}
+ if requested_scopes:
+ body["oauthScopesForDomainPolicyLookup"] = requested_scopes
+
+ return await _client_async._token_endpoint_request(
+ request,
+ reauth._REAUTH_API + ":start",
+ body,
+ access_token=access_token,
+ use_json=True,
+ )
+
+
+async def _send_challenge_result(
+ request, session_id, challenge_id, client_input, access_token
+):
+ """Attempt to refresh access token by sending next challenge result.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests. This must be an aiohttp request.
+ session_id (str): session id returned by the initial reauth call.
+ challenge_id (str): challenge id returned by the initial reauth call.
+ client_input: dict with a challenge-specific client input. For example:
+ ``{'credential': password}`` for password challenge.
+ access_token (str): Access token with reauth scopes.
+
+ Returns:
+ dict: The response from the reauth API.
+ """
+ body = {
+ "sessionId": session_id,
+ "challengeId": challenge_id,
+ "action": "RESPOND",
+ "proposalResponse": client_input,
+ }
+
+ return await _client_async._token_endpoint_request(
+ request,
+ reauth._REAUTH_API + "/{}:continue".format(session_id),
+ body,
+ access_token=access_token,
+ use_json=True,
+ )
+
+
+async def _run_next_challenge(msg, request, access_token):
+ """Get the next challenge from msg and run it.
+
+ Args:
+ msg (dict): Reauth API response body (either from the initial request to
+ https://reauth.googleapis.com/v2/sessions:start or from sending the
+ previous challenge response to
+ https://reauth.googleapis.com/v2/sessions/id:continue)
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests. This must be an aiohttp request.
+ access_token (str): reauth access token
+
+ Returns:
+ dict: The response from the reauth API.
+
+ Raises:
+ google.auth.exceptions.ReauthError: if reauth failed.
+ """
+ for challenge in msg["challenges"]:
+ if challenge["status"] != "READY":
+ # Skip non-activated challenges.
+ continue
+ c = challenges.AVAILABLE_CHALLENGES.get(challenge["challengeType"], None)
+ if not c:
+ raise exceptions.ReauthFailError(
+ "Unsupported challenge type {0}. Supported types: {1}".format(
+ challenge["challengeType"],
+ ",".join(list(challenges.AVAILABLE_CHALLENGES.keys())),
+ )
+ )
+ if not c.is_locally_eligible:
+ raise exceptions.ReauthFailError(
+ "Challenge {0} is not locally eligible".format(
+ challenge["challengeType"]
+ )
+ )
+ client_input = c.obtain_challenge_input(challenge)
+ if not client_input:
+ return None
+ return await _send_challenge_result(
+ request,
+ msg["sessionId"],
+ challenge["challengeId"],
+ client_input,
+ access_token,
+ )
+ return None
+
+
+async def _obtain_rapt(request, access_token, requested_scopes):
+ """Given an http request method and reauth access token, get rapt token.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests. This must be an aiohttp request.
+ access_token (str): reauth access token
+ requested_scopes (Sequence[str]): scopes required by the client application
+
+ Returns:
+ str: The rapt token.
+
+ Raises:
+ google.auth.exceptions.ReauthError: if reauth failed
+ """
+ msg = await _get_challenges(
+ request,
+ list(challenges.AVAILABLE_CHALLENGES.keys()),
+ access_token,
+ requested_scopes,
+ )
+
+ if msg["status"] == reauth._AUTHENTICATED:
+ return msg["encodedProofOfReauthToken"]
+
+ for _ in range(0, reauth.RUN_CHALLENGE_RETRY_LIMIT):
+ if not (
+ msg["status"] == reauth._CHALLENGE_REQUIRED
+ or msg["status"] == reauth._CHALLENGE_PENDING
+ ):
+ raise exceptions.ReauthFailError(
+ "Reauthentication challenge failed due to API error: {}".format(
+ msg["status"]
+ )
+ )
+
+ if not reauth.is_interactive():
+ raise exceptions.ReauthFailError(
+ "Reauthentication challenge could not be answered because you are not"
+ " in an interactive session."
+ )
+
+ msg = await _run_next_challenge(msg, request, access_token)
+
+ if msg["status"] == reauth._AUTHENTICATED:
+ return msg["encodedProofOfReauthToken"]
+
+ # If we got here it means we didn't get authenticated.
+ raise exceptions.ReauthFailError("Failed to obtain rapt token.")
+
+
+async def get_rapt_token(
+ request, client_id, client_secret, refresh_token, token_uri, scopes=None
+):
+ """Given an http request method and refresh_token, get rapt token.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests. This must be an aiohttp request.
+ client_id (str): client id to get access token for reauth scope.
+ client_secret (str): client secret for the client_id
+ refresh_token (str): refresh token to refresh access token
+ token_uri (str): uri to refresh access token
+ scopes (Optional(Sequence[str])): scopes required by the client application
+
+ Returns:
+        str: The rapt token.
+
+    Raises:
+ google.auth.exceptions.RefreshError: If reauth failed.
+ """
+ sys.stderr.write("Reauthentication required.\n")
+
+ # Get access token for reauth.
+ access_token, _, _, _ = await _client_async.refresh_grant(
+ request=request,
+ client_id=client_id,
+ client_secret=client_secret,
+ refresh_token=refresh_token,
+ token_uri=token_uri,
+ scopes=[reauth._REAUTH_SCOPE],
+ )
+
+ # Get rapt token from reauth API.
+ rapt_token = await _obtain_rapt(request, access_token, requested_scopes=scopes)
+
+ return rapt_token
+
+
+async def refresh_grant(
+ request,
+ token_uri,
+ refresh_token,
+ client_id,
+ client_secret,
+ scopes=None,
+ rapt_token=None,
+ enable_reauth_refresh=False,
+):
+ """Implements the reauthentication flow.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests. This must be an aiohttp request.
+ token_uri (str): The OAuth 2.0 authorizations server's token endpoint
+ URI.
+ refresh_token (str): The refresh token to use to get a new access
+ token.
+ client_id (str): The OAuth 2.0 application's client ID.
+ client_secret (str): The Oauth 2.0 appliaction's client secret.
+ scopes (Optional(Sequence[str])): Scopes to request. If present, all
+ scopes must be authorized for the refresh token. Useful if refresh
+ token has a wild card scope (e.g.
+ 'https://www.googleapis.com/auth/any-api').
+ rapt_token (Optional(str)): The rapt token for reauth.
+ enable_reauth_refresh (Optional[bool]): Whether reauth refresh flow
+ should be used. The default value is False. This option is for
+ gcloud only, other users should use the default value.
+
+ Returns:
+ Tuple[str, Optional[str], Optional[datetime], Mapping[str, str], str]: The
+ access token, new refresh token, expiration, the additional data
+ returned by the token endpoint, and the rapt token.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
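+
+    A hedged sketch of a typical call (identifiers are placeholders)::
+
+        token, new_refresh, expiry, extra, rapt = await refresh_grant(
+            request,                      # aiohttp-based transport request
+            "https://oauth2.googleapis.com/token",
+            refresh_token, client_id, client_secret,
+            enable_reauth_refresh=True,   # gcloud-style interactive reauth
+        )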
+ """
+ body = {
+ "grant_type": _client._REFRESH_GRANT_TYPE,
+ "client_id": client_id,
+ "client_secret": client_secret,
+ "refresh_token": refresh_token,
+ }
+ if scopes:
+ body["scope"] = " ".join(scopes)
+ if rapt_token:
+ body["rapt"] = rapt_token
+
+ response_status_ok, response_data, retryable_error = await _client_async._token_endpoint_request_no_throw(
+ request, token_uri, body
+ )
+ if (
+ not response_status_ok
+ and response_data.get("error") == reauth._REAUTH_NEEDED_ERROR
+ and (
+ response_data.get("error_subtype")
+ == reauth._REAUTH_NEEDED_ERROR_INVALID_RAPT
+ or response_data.get("error_subtype")
+ == reauth._REAUTH_NEEDED_ERROR_RAPT_REQUIRED
+ )
+ ):
+ if not enable_reauth_refresh:
+ raise exceptions.RefreshError(
+ "Reauthentication is needed. Please run `gcloud auth application-default login` to reauthenticate."
+ )
+
+ rapt_token = await get_rapt_token(
+ request, client_id, client_secret, refresh_token, token_uri, scopes=scopes
+ )
+ body["rapt"] = rapt_token
+ (
+ response_status_ok,
+ response_data,
+ retryable_error,
+ ) = await _client_async._token_endpoint_request_no_throw(
+ request, token_uri, body
+ )
+
+ if not response_status_ok:
+ _client._handle_error_response(response_data, retryable_error)
+ refresh_response = _client._handle_refresh_grant_response(
+ response_data, refresh_token
+ )
+ return refresh_response + (rapt_token,)
diff --git a/Lib/site-packages/google/oauth2/_service_account_async.py b/Lib/site-packages/google/oauth2/_service_account_async.py
new file mode 100644
index 0000000..cfd315a
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/_service_account_async.py
@@ -0,0 +1,132 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Service Accounts: JSON Web Token (JWT) Profile for OAuth 2.0
+
+NOTE: This file adds asynchronous refresh methods to both credentials
+classes, so async/await syntax is required when calling ``refresh`` on
+service account credentials used with asynchronous functionality.
+All other methods are inherited from the regular service account
+credentials module, google.oauth2.service_account.
+
+"""
+
+from google.auth import _credentials_async as credentials_async
+from google.auth import _helpers
+from google.oauth2 import _client_async
+from google.oauth2 import service_account
+
+
+class Credentials(
+ service_account.Credentials, credentials_async.Scoped, credentials_async.Credentials
+):
+ """Service account credentials
+
+ Usually, you'll create these credentials with one of the helper
+ constructors. To create credentials using a Google service account
+ private key JSON file::
+
+ credentials = _service_account_async.Credentials.from_service_account_file(
+ 'service-account.json')
+
+ Or if you already have the service account file loaded::
+
+ service_account_info = json.load(open('service_account.json'))
+ credentials = _service_account_async.Credentials.from_service_account_info(
+ service_account_info)
+
+ Both helper methods pass on arguments to the constructor, so you can
+ specify additional scopes and a subject if necessary::
+
+ credentials = _service_account_async.Credentials.from_service_account_file(
+ 'service-account.json',
+ scopes=['email'],
+ subject='user@example.com')
+
+ The credentials are considered immutable. If you want to modify the scopes
+ or the subject used for delegation, use :meth:`with_scopes` or
+ :meth:`with_subject`::
+
+ scoped_credentials = credentials.with_scopes(['email'])
+ delegated_credentials = credentials.with_subject(subject)
+
+ To add a quota project, use :meth:`with_quota_project`::
+
+ credentials = credentials.with_quota_project('myproject-123')
+ """
+
+ @_helpers.copy_docstring(credentials_async.Credentials)
+ async def refresh(self, request):
+ assertion = self._make_authorization_grant_assertion()
+ access_token, expiry, _ = await _client_async.jwt_grant(
+ request, self._token_uri, assertion
+ )
+ self.token = access_token
+ self.expiry = expiry
+
+
+class IDTokenCredentials(
+ service_account.IDTokenCredentials,
+ credentials_async.Signing,
+ credentials_async.Credentials,
+):
+ """Open ID Connect ID Token-based service account credentials.
+
+ These credentials are largely similar to :class:`.Credentials`, but instead
+ of using an OAuth 2.0 Access Token as the bearer token, they use an Open
+ ID Connect ID Token as the bearer token. These credentials are useful when
+ communicating to services that require ID Tokens and can not accept access
+ tokens.
+
+ Usually, you'll create these credentials with one of the helper
+ constructors. To create credentials using a Google service account
+ private key JSON file::
+
+ credentials = (
+ _service_account_async.IDTokenCredentials.from_service_account_file(
+ 'service-account.json'))
+
+ Or if you already have the service account file loaded::
+
+ service_account_info = json.load(open('service_account.json'))
+ credentials = (
+ _service_account_async.IDTokenCredentials.from_service_account_info(
+ service_account_info))
+
+ Both helper methods pass on arguments to the constructor, so you can
+ specify additional scopes and a subject if necessary::
+
+ credentials = (
+ _service_account_async.IDTokenCredentials.from_service_account_file(
+ 'service-account.json',
+ scopes=['email'],
+ subject='user@example.com'))
+
+ The credentials are considered immutable. If you want to modify the scopes
+ or the subject used for delegation, use :meth:`with_scopes` or
+ :meth:`with_subject`::
+
+ scoped_credentials = credentials.with_scopes(['email'])
+ delegated_credentials = credentials.with_subject(subject)
+
+ """
+
+ @_helpers.copy_docstring(credentials_async.Credentials)
+ async def refresh(self, request):
+ assertion = self._make_authorization_grant_assertion()
+        id_token, expiry, _ = await _client_async.id_token_jwt_grant(
+            request, self._token_uri, assertion
+        )
+        self.token = id_token
+ self.expiry = expiry
diff --git a/Lib/site-packages/google/oauth2/challenges.py b/Lib/site-packages/google/oauth2/challenges.py
new file mode 100644
index 0000000..c557963
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/challenges.py
@@ -0,0 +1,203 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" Challenges for reauthentication.
+"""
+
+import abc
+import base64
+import getpass
+import sys
+
+from google.auth import _helpers
+from google.auth import exceptions
+
+
+REAUTH_ORIGIN = "https://accounts.google.com"
+SAML_CHALLENGE_MESSAGE = (
+ "Please run `gcloud auth login` to complete reauthentication with SAML."
+)
+
+
+def get_user_password(text):
+ """Get password from user.
+
+ Override this function with a different logic if you are using this library
+ outside a CLI.
+
+ Args:
+ text (str): message for the password prompt.
+
+ Returns:
+ str: password string.
+ """
+ return getpass.getpass(text)
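+
+# For example, a GUI application could swap in its own prompt (hypothetical
+# names):
+#
+#     import google.oauth2.challenges as challenges
+#     challenges.get_user_password = lambda text: my_dialog.prompt(text)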
+
+
+class ReauthChallenge(metaclass=abc.ABCMeta):
+ """Base class for reauth challenges."""
+
+ @property
+ @abc.abstractmethod
+ def name(self): # pragma: NO COVER
+ """Returns the name of the challenge."""
+ raise NotImplementedError("name property must be implemented")
+
+ @property
+ @abc.abstractmethod
+ def is_locally_eligible(self): # pragma: NO COVER
+ """Returns true if a challenge is supported locally on this machine."""
+ raise NotImplementedError("is_locally_eligible property must be implemented")
+
+ @abc.abstractmethod
+ def obtain_challenge_input(self, metadata): # pragma: NO COVER
+ """Performs logic required to obtain credentials and returns it.
+
+ Args:
+ metadata (Mapping): challenge metadata returned in the 'challenges' field in
+ the initial reauth request. Includes the 'challengeType' field
+ and other challenge-specific fields.
+
+ Returns:
+        response that will be sent to the reauth service as the content of
+ the 'proposalResponse' field in the request body. Usually a dict
+ with the keys specific to the challenge. For example,
+ ``{'credential': password}`` for password challenge.
+ """
+ raise NotImplementedError("obtain_challenge_input method must be implemented")
+
+
+class PasswordChallenge(ReauthChallenge):
+ """Challenge that asks for user's password."""
+
+ @property
+ def name(self):
+ return "PASSWORD"
+
+ @property
+ def is_locally_eligible(self):
+ return True
+
+ @_helpers.copy_docstring(ReauthChallenge)
+ def obtain_challenge_input(self, unused_metadata):
+ passwd = get_user_password("Please enter your password:")
+ if not passwd:
+ passwd = " " # avoid the server crashing in case of no password :D
+ return {"credential": passwd}
+
+
+class SecurityKeyChallenge(ReauthChallenge):
+ """Challenge that asks for user's security key touch."""
+
+ @property
+ def name(self):
+ return "SECURITY_KEY"
+
+ @property
+ def is_locally_eligible(self):
+ return True
+
+ @_helpers.copy_docstring(ReauthChallenge)
+ def obtain_challenge_input(self, metadata):
+ try:
+ import pyu2f.convenience.authenticator # type: ignore
+ import pyu2f.errors # type: ignore
+ import pyu2f.model # type: ignore
+ except ImportError:
+ raise exceptions.ReauthFailError(
+ "pyu2f dependency is required to use Security key reauth feature. "
+ "It can be installed via `pip install pyu2f` or `pip install google-auth[reauth]`."
+ )
+ sk = metadata["securityKey"]
+ challenges = sk["challenges"]
+        # Read both 'applicationId' and 'relyingPartyId'. If they are the same,
+        # use applicationId; if they differ, try relyingPartyId first and retry
+        # with applicationId.
+ application_id = sk["applicationId"]
+ relying_party_id = sk["relyingPartyId"]
+
+ if application_id != relying_party_id:
+ application_parameters = [relying_party_id, application_id]
+ else:
+ application_parameters = [application_id]
+
+ challenge_data = []
+ for c in challenges:
+ kh = c["keyHandle"].encode("ascii")
+ key = pyu2f.model.RegisteredKey(bytearray(base64.urlsafe_b64decode(kh)))
+ challenge = c["challenge"].encode("ascii")
+ challenge = base64.urlsafe_b64decode(challenge)
+ challenge_data.append({"key": key, "challenge": challenge})
+
+ # Track number of tries to suppress error message until all application_parameters
+ # are tried.
+ tries = 0
+ for app_id in application_parameters:
+ try:
+ tries += 1
+ api = pyu2f.convenience.authenticator.CreateCompositeAuthenticator(
+ REAUTH_ORIGIN
+ )
+ response = api.Authenticate(
+ app_id, challenge_data, print_callback=sys.stderr.write
+ )
+ return {"securityKey": response}
+ except pyu2f.errors.U2FError as e:
+ if e.code == pyu2f.errors.U2FError.DEVICE_INELIGIBLE:
+ # Only show error if all app_ids have been tried
+ if tries == len(application_parameters):
+ sys.stderr.write("Ineligible security key.\n")
+ return None
+ continue
+ if e.code == pyu2f.errors.U2FError.TIMEOUT:
+ sys.stderr.write(
+ "Timed out while waiting for security key touch.\n"
+ )
+ else:
+ raise e
+ except pyu2f.errors.PluginError as e:
+ sys.stderr.write("Plugin error: {}.\n".format(e))
+ continue
+ except pyu2f.errors.NoDeviceFoundError:
+ sys.stderr.write("No security key found.\n")
+ return None
+
+
+class SamlChallenge(ReauthChallenge):
+ """Challenge that asks the users to browse to their ID Providers.
+
+ Currently SAML challenge is not supported. When obtaining the challenge
+ input, exception will be raised to instruct the users to run
+ `gcloud auth login` for reauthentication.
+ """
+
+ @property
+ def name(self):
+ return "SAML"
+
+ @property
+ def is_locally_eligible(self):
+ return True
+
+ def obtain_challenge_input(self, metadata):
+        # Magic Arch does not yet support returning a proper redirect URL
+        # for programmatic SAML users, so we error out here and ask users
+        # to complete the login via gcloud.
+ raise exceptions.ReauthSamlChallengeFailError(SAML_CHALLENGE_MESSAGE)
+
+
+AVAILABLE_CHALLENGES = {
+ challenge.name: challenge
+ for challenge in [SecurityKeyChallenge(), PasswordChallenge(), SamlChallenge()]
+}
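+
+# A consumer looks a challenge up by the server-provided type and runs it,
+# e.g. (a sketch; ``metadata`` stands in for the challenge entry returned by
+# the reauth API):
+#
+#     challenge = AVAILABLE_CHALLENGES.get("PASSWORD")
+#     if challenge and challenge.is_locally_eligible:
+#         result = challenge.obtain_challenge_input(metadata)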
diff --git a/Lib/site-packages/google/oauth2/credentials.py b/Lib/site-packages/google/oauth2/credentials.py
new file mode 100644
index 0000000..c239bee
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/credentials.py
@@ -0,0 +1,635 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""OAuth 2.0 Credentials.
+
+This module provides credentials based on OAuth 2.0 access and refresh tokens.
+These credentials usually access resources on behalf of a user (resource
+owner).
+
+Specifically, this is intended to use access tokens acquired using the
+`Authorization Code grant`_ and can refresh those tokens using an
+optional `refresh token`_.
+
+Obtaining the initial access and refresh token is outside of the scope of this
+module. Consult `rfc6749 section 4.1`_ for complete details on the
+Authorization Code grant flow.
+
+.. _Authorization Code grant: https://tools.ietf.org/html/rfc6749#section-1.3.1
+.. _refresh token: https://tools.ietf.org/html/rfc6749#section-6
+.. _rfc6749 section 4.1: https://tools.ietf.org/html/rfc6749#section-4.1
+"""
+
+from datetime import datetime
+import io
+import json
+import logging
+import warnings
+
+from google.auth import _cloud_sdk
+from google.auth import _helpers
+from google.auth import credentials
+from google.auth import exceptions
+from google.auth import metrics
+from google.oauth2 import reauth
+
+_LOGGER = logging.getLogger(__name__)
+
+
+# The Google OAuth 2.0 token endpoint. Used for authorized user credentials.
+_GOOGLE_OAUTH2_TOKEN_ENDPOINT = "https://oauth2.googleapis.com/token"
+_DEFAULT_UNIVERSE_DOMAIN = "googleapis.com"
+
+
+class Credentials(credentials.ReadOnlyScoped, credentials.CredentialsWithQuotaProject):
+ """Credentials using OAuth 2.0 access and refresh tokens.
+
+ The credentials are considered immutable except the tokens and the token
+ expiry, which are updated after refresh. If you want to modify the quota
+ project, use :meth:`with_quota_project` or ::
+
+ credentials = credentials.with_quota_project('myproject-123')
+
+    Reauth is disabled by default. To enable reauth, set the
+    `enable_reauth_refresh` parameter to True in the constructor. Note that
+    the reauth feature is intended for gcloud use only.
+    If reauth is enabled, the `pyu2f` dependency has to be installed in order
+    to use the security key reauth feature. The dependency can be installed
+    via `pip install pyu2f` or `pip install google-auth[reauth]`.
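+
+    A minimal refreshable credential can be constructed directly (a sketch;
+    the string values are placeholders)::
+
+        credentials = Credentials(
+            None,  # access token; populated on the first refresh
+            refresh_token='refresh-token',
+            token_uri='https://oauth2.googleapis.com/token',
+            client_id='client-id',
+            client_secret='client-secret')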
+ """
+
+ def __init__(
+ self,
+ token,
+ refresh_token=None,
+ id_token=None,
+ token_uri=None,
+ client_id=None,
+ client_secret=None,
+ scopes=None,
+ default_scopes=None,
+ quota_project_id=None,
+ expiry=None,
+ rapt_token=None,
+ refresh_handler=None,
+ enable_reauth_refresh=False,
+ granted_scopes=None,
+ trust_boundary=None,
+ universe_domain=_DEFAULT_UNIVERSE_DOMAIN,
+ account=None,
+ ):
+ """
+ Args:
+ token (Optional(str)): The OAuth 2.0 access token. Can be None
+ if refresh information is provided.
+ refresh_token (str): The OAuth 2.0 refresh token. If specified,
+ credentials can be refreshed.
+            id_token (str): The OpenID Connect ID Token.
+ token_uri (str): The OAuth 2.0 authorization server's token
+ endpoint URI. Must be specified for refresh, can be left as
+ None if the token can not be refreshed.
+ client_id (str): The OAuth 2.0 client ID. Must be specified for
+ refresh, can be left as None if the token can not be refreshed.
+ client_secret(str): The OAuth 2.0 client secret. Must be specified
+ for refresh, can be left as None if the token can not be
+ refreshed.
+ scopes (Sequence[str]): The scopes used to obtain authorization.
+ This parameter is used by :meth:`has_scopes`. OAuth 2.0
+ credentials can not request additional scopes after
+ authorization. The scopes must be derivable from the refresh
+ token if refresh information is provided (e.g. The refresh
+ token scopes are a superset of this or contain a wild card
+ scope like 'https://www.googleapis.com/auth/any-api').
+ default_scopes (Sequence[str]): Default scopes passed by a
+ Google client library. Use 'scopes' for user-defined scopes.
+ quota_project_id (Optional[str]): The project ID used for quota and billing.
+ This project may be different from the project used to
+ create the credentials.
+ rapt_token (Optional[str]): The reauth Proof Token.
+ refresh_handler (Optional[Callable[[google.auth.transport.Request, Sequence[str]], [str, datetime]]]):
+ A callable which takes in the HTTP request callable and the list of
+ OAuth scopes and when called returns an access token string for the
+ requested scopes and its expiry datetime. This is useful when no
+ refresh tokens are provided and tokens are obtained by calling
+ some external process on demand. It is particularly useful for
+ retrieving downscoped tokens from a token broker.
+ enable_reauth_refresh (Optional[bool]): Whether reauth refresh flow
+ should be used. This flag is for gcloud to use only.
+ granted_scopes (Optional[Sequence[str]]): The scopes that were consented/granted by the user.
+                This may differ from the requested scopes, and it may be empty
+                if the granted and requested scopes are the same.
+ trust_boundary (str): String representation of trust boundary meta.
+ universe_domain (Optional[str]): The universe domain. The default
+ universe domain is googleapis.com.
+ account (Optional[str]): The account associated with the credential.
+ """
+ super(Credentials, self).__init__()
+ self.token = token
+ self.expiry = expiry
+ self._refresh_token = refresh_token
+ self._id_token = id_token
+ self._scopes = scopes
+ self._default_scopes = default_scopes
+ self._granted_scopes = granted_scopes
+ self._token_uri = token_uri
+ self._client_id = client_id
+ self._client_secret = client_secret
+ self._quota_project_id = quota_project_id
+ self._rapt_token = rapt_token
+ self.refresh_handler = refresh_handler
+ self._enable_reauth_refresh = enable_reauth_refresh
+ self._trust_boundary = trust_boundary
+ self._universe_domain = universe_domain or _DEFAULT_UNIVERSE_DOMAIN
+ self._account = account or ""
+
+ def __getstate__(self):
+ """A __getstate__ method must exist for the __setstate__ to be called
+ This is identical to the default implementation.
+ See https://docs.python.org/3.7/library/pickle.html#object.__setstate__
+ """
+ state_dict = self.__dict__.copy()
+ # Remove _refresh_handler function as there are limitations pickling and
+ # unpickling certain callables (lambda, functools.partial instances)
+ # because they need to be importable.
+ # Instead, the refresh_handler setter should be used to repopulate this.
+ if "_refresh_handler" in state_dict:
+ del state_dict["_refresh_handler"]
+
+ if "_refresh_worker" in state_dict:
+ del state_dict["_refresh_worker"]
+ return state_dict
+
+ def __setstate__(self, d):
+ """Credentials pickled with older versions of the class do not have
+ all the attributes."""
+ self.token = d.get("token")
+ self.expiry = d.get("expiry")
+ self._refresh_token = d.get("_refresh_token")
+ self._id_token = d.get("_id_token")
+ self._scopes = d.get("_scopes")
+ self._default_scopes = d.get("_default_scopes")
+ self._granted_scopes = d.get("_granted_scopes")
+ self._token_uri = d.get("_token_uri")
+ self._client_id = d.get("_client_id")
+ self._client_secret = d.get("_client_secret")
+ self._quota_project_id = d.get("_quota_project_id")
+ self._rapt_token = d.get("_rapt_token")
+ self._enable_reauth_refresh = d.get("_enable_reauth_refresh")
+ self._trust_boundary = d.get("_trust_boundary")
+ self._universe_domain = d.get("_universe_domain") or _DEFAULT_UNIVERSE_DOMAIN
+ # The refresh_handler setter should be used to repopulate this.
+ self._refresh_handler = None
+ self._refresh_worker = None
+ self._use_non_blocking_refresh = d.get("_use_non_blocking_refresh", False)
+ self._account = d.get("_account", "")
+
+ @property
+ def refresh_token(self):
+ """Optional[str]: The OAuth 2.0 refresh token."""
+ return self._refresh_token
+
+ @property
+ def scopes(self):
+ """Optional[str]: The OAuth 2.0 permission scopes."""
+ return self._scopes
+
+ @property
+ def granted_scopes(self):
+ """Optional[Sequence[str]]: The OAuth 2.0 permission scopes that were granted by the user."""
+ return self._granted_scopes
+
+ @property
+ def token_uri(self):
+ """Optional[str]: The OAuth 2.0 authorization server's token endpoint
+ URI."""
+ return self._token_uri
+
+ @property
+ def id_token(self):
+ """Optional[str]: The Open ID Connect ID Token.
+
+ Depending on the authorization server and the scopes requested, this
+ may be populated when credentials are obtained and updated when
+ :meth:`refresh` is called. This token is a JWT. It can be verified
+ and decoded using :func:`google.oauth2.id_token.verify_oauth2_token`.
+ """
+ return self._id_token
+
+ @property
+ def client_id(self):
+ """Optional[str]: The OAuth 2.0 client ID."""
+ return self._client_id
+
+ @property
+ def client_secret(self):
+ """Optional[str]: The OAuth 2.0 client secret."""
+ return self._client_secret
+
+ @property
+ def requires_scopes(self):
+ """False: OAuth 2.0 credentials have their scopes set when
+        the initial token is requested and cannot be changed."""
+ return False
+
+ @property
+ def rapt_token(self):
+ """Optional[str]: The reauth Proof Token."""
+ return self._rapt_token
+
+ @property
+ def refresh_handler(self):
+ """Returns the refresh handler if available.
+
+ Returns:
+ Optional[Callable[[google.auth.transport.Request, Sequence[str]], [str, datetime]]]:
+ The current refresh handler.
+ """
+ return self._refresh_handler
+
+ @refresh_handler.setter
+ def refresh_handler(self, value):
+ """Updates the current refresh handler.
+
+ Args:
+ value (Optional[Callable[[google.auth.transport.Request, Sequence[str]], [str, datetime]]]):
+ The updated value of the refresh handler.
+
+ Raises:
+ TypeError: If the value is not a callable or None.
+ """
+ if not callable(value) and value is not None:
+ raise TypeError("The provided refresh_handler is not a callable or None.")
+ self._refresh_handler = value
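+
+    # A refresh handler takes the transport request and the scope list and
+    # returns an ``(access_token, expiry)`` pair, where ``expiry`` is a
+    # ``datetime`` in UTC. A sketch (``fetch_broker_token`` is a hypothetical
+    # call to an external token broker):
+    #
+    #     def handler(request, scopes=None):
+    #         token, expiry = fetch_broker_token(scopes)
+    #         return token, expiry
+    #
+    #     credentials.refresh_handler = handler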
+
+ @property
+ def account(self):
+ """str: The user account associated with the credential. If the account is unknown an empty string is returned."""
+ return self._account
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+
+ return self.__class__(
+ self.token,
+ refresh_token=self.refresh_token,
+ id_token=self.id_token,
+ token_uri=self.token_uri,
+ client_id=self.client_id,
+ client_secret=self.client_secret,
+ scopes=self.scopes,
+ default_scopes=self.default_scopes,
+ granted_scopes=self.granted_scopes,
+ quota_project_id=quota_project_id,
+ rapt_token=self.rapt_token,
+ enable_reauth_refresh=self._enable_reauth_refresh,
+ trust_boundary=self._trust_boundary,
+ universe_domain=self._universe_domain,
+ account=self._account,
+ )
+
+ @_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
+ def with_token_uri(self, token_uri):
+
+ return self.__class__(
+ self.token,
+ refresh_token=self.refresh_token,
+ id_token=self.id_token,
+ token_uri=token_uri,
+ client_id=self.client_id,
+ client_secret=self.client_secret,
+ scopes=self.scopes,
+ default_scopes=self.default_scopes,
+ granted_scopes=self.granted_scopes,
+ quota_project_id=self.quota_project_id,
+ rapt_token=self.rapt_token,
+ enable_reauth_refresh=self._enable_reauth_refresh,
+ trust_boundary=self._trust_boundary,
+ universe_domain=self._universe_domain,
+ account=self._account,
+ )
+
+ def with_account(self, account):
+ """Returns a copy of these credentials with a modified account.
+
+ Args:
+ account (str): The account to set
+
+ Returns:
+ google.oauth2.credentials.Credentials: A new credentials instance.
+ """
+
+ return self.__class__(
+ self.token,
+ refresh_token=self.refresh_token,
+ id_token=self.id_token,
+ token_uri=self._token_uri,
+ client_id=self.client_id,
+ client_secret=self.client_secret,
+ scopes=self.scopes,
+ default_scopes=self.default_scopes,
+ granted_scopes=self.granted_scopes,
+ quota_project_id=self.quota_project_id,
+ rapt_token=self.rapt_token,
+ enable_reauth_refresh=self._enable_reauth_refresh,
+ trust_boundary=self._trust_boundary,
+ universe_domain=self._universe_domain,
+ account=account,
+ )
+
+ @_helpers.copy_docstring(credentials.CredentialsWithUniverseDomain)
+ def with_universe_domain(self, universe_domain):
+
+ return self.__class__(
+ self.token,
+ refresh_token=self.refresh_token,
+ id_token=self.id_token,
+ token_uri=self._token_uri,
+ client_id=self.client_id,
+ client_secret=self.client_secret,
+ scopes=self.scopes,
+ default_scopes=self.default_scopes,
+ granted_scopes=self.granted_scopes,
+ quota_project_id=self.quota_project_id,
+ rapt_token=self.rapt_token,
+ enable_reauth_refresh=self._enable_reauth_refresh,
+ trust_boundary=self._trust_boundary,
+ universe_domain=universe_domain,
+ account=self._account,
+ )
+
+ def _metric_header_for_usage(self):
+ return metrics.CRED_TYPE_USER
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ if self._universe_domain != _DEFAULT_UNIVERSE_DOMAIN:
+ raise exceptions.RefreshError(
+ "User credential refresh is only supported in the default "
+ "googleapis.com universe domain, but the current universe "
+ "domain is {}. If you created the credential with an access "
+ "token, it's likely that the provided token is expired now, "
+ "please update your code with a valid token.".format(
+ self._universe_domain
+ )
+ )
+
+ scopes = self._scopes if self._scopes is not None else self._default_scopes
+ # Use refresh handler if available and no refresh token is
+ # available. This is useful in general when tokens are obtained by calling
+ # some external process on demand. It is particularly useful for retrieving
+ # downscoped tokens from a token broker.
+ if self._refresh_token is None and self.refresh_handler:
+ token, expiry = self.refresh_handler(request, scopes=scopes)
+ # Validate returned data.
+ if not isinstance(token, str):
+ raise exceptions.RefreshError(
+ "The refresh_handler returned token is not a string."
+ )
+ if not isinstance(expiry, datetime):
+ raise exceptions.RefreshError(
+ "The refresh_handler returned expiry is not a datetime object."
+ )
+ if _helpers.utcnow() >= expiry - _helpers.REFRESH_THRESHOLD:
+ raise exceptions.RefreshError(
+ "The credentials returned by the refresh_handler are "
+ "already expired."
+ )
+ self.token = token
+ self.expiry = expiry
+ return
+
+ if (
+ self._refresh_token is None
+ or self._token_uri is None
+ or self._client_id is None
+ or self._client_secret is None
+ ):
+ raise exceptions.RefreshError(
+ "The credentials do not contain the necessary fields need to "
+ "refresh the access token. You must specify refresh_token, "
+ "token_uri, client_id, and client_secret."
+ )
+
+ (
+ access_token,
+ refresh_token,
+ expiry,
+ grant_response,
+ rapt_token,
+ ) = reauth.refresh_grant(
+ request,
+ self._token_uri,
+ self._refresh_token,
+ self._client_id,
+ self._client_secret,
+ scopes=scopes,
+ rapt_token=self._rapt_token,
+ enable_reauth_refresh=self._enable_reauth_refresh,
+ )
+
+ self.token = access_token
+ self.expiry = expiry
+ self._refresh_token = refresh_token
+ self._id_token = grant_response.get("id_token")
+ self._rapt_token = rapt_token
+
+ if scopes and "scope" in grant_response:
+ requested_scopes = frozenset(scopes)
+ self._granted_scopes = grant_response["scope"].split()
+ granted_scopes = frozenset(self._granted_scopes)
+ scopes_requested_but_not_granted = requested_scopes - granted_scopes
+ if scopes_requested_but_not_granted:
+ # User might be presented with unbundled scopes at the time of
+ # consent. So it is a valid scenario to not have all the requested
+ # scopes as part of granted scopes but log a warning in case the
+ # developer wants to debug the scenario.
+ _LOGGER.warning(
+ "Not all requested scopes were granted by the "
+ "authorization server, missing scopes {}.".format(
+ ", ".join(scopes_requested_but_not_granted)
+ )
+ )
+
+ @classmethod
+ def from_authorized_user_info(cls, info, scopes=None):
+ """Creates a Credentials instance from parsed authorized user info.
+
+ Args:
+ info (Mapping[str, str]): The authorized user info in Google
+ format.
+ scopes (Sequence[str]): Optional list of scopes to include in the
+ credentials.
+
+ Returns:
+ google.oauth2.credentials.Credentials: The constructed
+ credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ keys_needed = set(("refresh_token", "client_id", "client_secret"))
+ missing = keys_needed.difference(info.keys())
+
+ if missing:
+ raise ValueError(
+ "Authorized user info was not in the expected format, missing "
+ "fields {}.".format(", ".join(missing))
+ )
+
+ # access token expiry (datetime obj); auto-expire if not saved
+ expiry = info.get("expiry")
+ if expiry:
+ expiry = datetime.strptime(
+ expiry.rstrip("Z").split(".")[0], "%Y-%m-%dT%H:%M:%S"
+ )
+ else:
+ expiry = _helpers.utcnow() - _helpers.REFRESH_THRESHOLD
+
+        # Process scopes, which need to be a sequence.
+ if scopes is None and "scopes" in info:
+ scopes = info.get("scopes")
+ if isinstance(scopes, str):
+ scopes = scopes.split(" ")
+
+ return cls(
+ token=info.get("token"),
+ refresh_token=info.get("refresh_token"),
+ token_uri=_GOOGLE_OAUTH2_TOKEN_ENDPOINT, # always overrides
+ scopes=scopes,
+ client_id=info.get("client_id"),
+ client_secret=info.get("client_secret"),
+ quota_project_id=info.get("quota_project_id"), # may not exist
+ expiry=expiry,
+ rapt_token=info.get("rapt_token"), # may not exist
+ trust_boundary=info.get("trust_boundary"), # may not exist
+ universe_domain=info.get("universe_domain"), # may not exist
+ account=info.get("account", ""), # may not exist
+ )
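+
+    # A minimal info mapping needs only the three refresh fields (a sketch;
+    # the values are placeholders):
+    #
+    #     creds = Credentials.from_authorized_user_info({
+    #         'refresh_token': 'refresh-token',
+    #         'client_id': 'client-id',
+    #         'client_secret': 'client-secret',
+    #     }, scopes=['openid'])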
+
+ @classmethod
+ def from_authorized_user_file(cls, filename, scopes=None):
+ """Creates a Credentials instance from an authorized user json file.
+
+ Args:
+ filename (str): The path to the authorized user json file.
+ scopes (Sequence[str]): Optional list of scopes to include in the
+ credentials.
+
+ Returns:
+ google.oauth2.credentials.Credentials: The constructed
+ credentials.
+
+ Raises:
+ ValueError: If the file is not in the expected format.
+ """
+ with io.open(filename, "r", encoding="utf-8") as json_file:
+ data = json.load(json_file)
+ return cls.from_authorized_user_info(data, scopes)
+
+ def to_json(self, strip=None):
+ """Utility function that creates a JSON representation of a Credentials
+ object.
+
+ Args:
+ strip (Sequence[str]): Optional list of members to exclude from the
+ generated JSON.
+
+ Returns:
+ str: A JSON representation of this instance. When converted into
+ a dictionary, it can be passed to from_authorized_user_info()
+ to create a new credential instance.
+ """
+ prep = {
+ "token": self.token,
+ "refresh_token": self.refresh_token,
+ "token_uri": self.token_uri,
+ "client_id": self.client_id,
+ "client_secret": self.client_secret,
+ "scopes": self.scopes,
+ "rapt_token": self.rapt_token,
+ "universe_domain": self._universe_domain,
+ "account": self._account,
+ }
+ if self.expiry: # flatten expiry timestamp
+ prep["expiry"] = self.expiry.isoformat() + "Z"
+
+ # Remove empty entries (those which are None)
+ prep = {k: v for k, v in prep.items() if v is not None}
+
+        # Remove entries that explicitly need to be removed
+ if strip is not None:
+ prep = {k: v for k, v in prep.items() if k not in strip}
+
+ return json.dumps(prep)
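+
+    # Round-trip sketch: the JSON produced by ``to_json`` parses back into a
+    # credential via ``from_authorized_user_info``; the access token can be
+    # stripped when only refresh material should be persisted:
+    #
+    #     blob = credentials.to_json(strip=['token'])
+    #     restored = Credentials.from_authorized_user_info(json.loads(blob))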
+
+
+class UserAccessTokenCredentials(credentials.CredentialsWithQuotaProject):
+ """Access token credentials for user account.
+
+ Obtain the access token for a given user account or the current active
+ user account with the ``gcloud auth print-access-token`` command.
+
+ Args:
+ account (Optional[str]): Account to get the access token for. If not
+ specified, the current active account will be used.
+ quota_project_id (Optional[str]): The project ID used for quota
+ and billing.
+ """
+
+ def __init__(self, account=None, quota_project_id=None):
+ warnings.warn(
+ "UserAccessTokenCredentials is deprecated, please use "
+ "google.oauth2.credentials.Credentials instead. To use "
+ "that credential type, simply run "
+ "`gcloud auth application-default login` and let the "
+ "client libraries pick up the application default credentials."
+ )
+ super(UserAccessTokenCredentials, self).__init__()
+ self._account = account
+ self._quota_project_id = quota_project_id
+
+ def with_account(self, account):
+ """Create a new instance with the given account.
+
+ Args:
+ account (str): Account to get the access token for.
+
+ Returns:
+ google.oauth2.credentials.UserAccessTokenCredentials: The created
+ credentials with the given account.
+ """
+ return self.__class__(account=account, quota_project_id=self._quota_project_id)
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ return self.__class__(account=self._account, quota_project_id=quota_project_id)
+
+ def refresh(self, request):
+ """Refreshes the access token.
+
+ Args:
+ request (google.auth.transport.Request): This argument is required
+ by the base class interface but not used in this implementation,
+ so just set it to `None`.
+
+ Raises:
+ google.auth.exceptions.UserAccessTokenError: If the access token
+ refresh failed.
+ """
+ self.token = _cloud_sdk.get_auth_access_token(self._account)
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def before_request(self, request, method, url, headers):
+ self.refresh(request)
+ self.apply(headers)
diff --git a/Lib/site-packages/google/oauth2/gdch_credentials.py b/Lib/site-packages/google/oauth2/gdch_credentials.py
new file mode 100644
index 0000000..7410cfc
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/gdch_credentials.py
@@ -0,0 +1,251 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Experimental GDCH credentials support.
+"""
+
+import datetime
+
+from google.auth import _helpers
+from google.auth import _service_account_info
+from google.auth import credentials
+from google.auth import exceptions
+from google.auth import jwt
+from google.oauth2 import _client
+
+
+TOKEN_EXCHANGE_TYPE = "urn:ietf:params:oauth:token-type:token-exchange"
+ACCESS_TOKEN_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:access_token"
+SERVICE_ACCOUNT_TOKEN_TYPE = "urn:k8s:params:oauth:token-type:serviceaccount"
+JWT_LIFETIME = datetime.timedelta(seconds=3600) # 1 hour
+
+
+class ServiceAccountCredentials(credentials.Credentials):
+ """Credentials for GDCH (`Google Distributed Cloud Hosted`_) for service
+ account users.
+
+ .. _Google Distributed Cloud Hosted:
+ https://cloud.google.com/blog/topics/hybrid-cloud/\
+ announcing-google-distributed-cloud-edge-and-hosted
+
+ To create a GDCH service account credential, first create a JSON file of
+ the following format::
+
+ {
+ "type": "gdch_service_account",
+ "format_version": "1",
+ "project": "",
+ "private_key_id": "",
+ "private_key": "-----BEGIN EC PRIVATE KEY-----\n\n-----END EC PRIVATE KEY-----\n",
+ "name": "",
+ "ca_cert_path": "",
+ "token_uri": "https://service-identity./authenticate"
+ }
+
+ The "format_version" field stands for the format of the JSON file. For now
+ it is always "1". The `private_key_id` and `private_key` is used for signing.
+ The `ca_cert_path` is used for token server TLS certificate verification.
+
+ After the JSON file is created, set `GOOGLE_APPLICATION_CREDENTIALS` environment
+ variable to the JSON file path, then use the following code to create the
+ credential::
+
+ import google.auth
+
+ credential, _ = google.auth.default()
+ credential = credential.with_gdch_audience("")
+
+ We can also create the credential directly::
+
+        from google.oauth2 import gdch_credentials
+
+ credential = gdch_credentials.ServiceAccountCredentials.from_service_account_file("")
+ credential = credential.with_gdch_audience("")
+
+ The token is obtained in the following way. This class first creates a
+    self-signed JWT. It uses the `name` value as the `iss` and `sub` claim, and
+    the `token_uri` as the `aud` claim, and signs the JWT with the `private_key`.
+    It then sends the JWT to the `token_uri` to exchange it for a final token
+    for `audience`.
+ """
+
+ def __init__(
+ self, signer, service_identity_name, project, audience, token_uri, ca_cert_path
+ ):
+ """
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ service_identity_name (str): The service identity name. It will be
+ used as the `iss` and `sub` claim in the self signed JWT.
+ project (str): The project.
+ audience (str): The audience for the final token.
+ token_uri (str): The token server uri.
+ ca_cert_path (str): The CA cert path for token server side TLS
+ certificate verification. If the token server uses well known
+ CA, then this parameter can be `None`.
+ """
+ super(ServiceAccountCredentials, self).__init__()
+ self._signer = signer
+ self._service_identity_name = service_identity_name
+ self._project = project
+ self._audience = audience
+ self._token_uri = token_uri
+ self._ca_cert_path = ca_cert_path
+
+ def _create_jwt(self):
+ now = _helpers.utcnow()
+ expiry = now + JWT_LIFETIME
+ iss_sub_value = "system:serviceaccount:{}:{}".format(
+ self._project, self._service_identity_name
+ )
+
+ payload = {
+ "iss": iss_sub_value,
+ "sub": iss_sub_value,
+ "aud": self._token_uri,
+ "iat": _helpers.datetime_to_secs(now),
+ "exp": _helpers.datetime_to_secs(expiry),
+ }
+
+ return _helpers.from_bytes(jwt.encode(self._signer, payload))
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ import google.auth.transport.requests
+
+ if not isinstance(request, google.auth.transport.requests.Request):
+ raise exceptions.RefreshError(
+ "For GDCH service account credentials, request must be a google.auth.transport.requests.Request object"
+ )
+
+ # Create a self signed JWT, and do token exchange.
+ jwt_token = self._create_jwt()
+ request_body = {
+ "grant_type": TOKEN_EXCHANGE_TYPE,
+ "audience": self._audience,
+ "requested_token_type": ACCESS_TOKEN_TOKEN_TYPE,
+ "subject_token": jwt_token,
+ "subject_token_type": SERVICE_ACCOUNT_TOKEN_TYPE,
+ }
+ response_data = _client._token_endpoint_request(
+ request,
+ self._token_uri,
+ request_body,
+ access_token=None,
+ use_json=True,
+ verify=self._ca_cert_path,
+ )
+
+ self.token, _, self.expiry, _ = _client._handle_refresh_grant_response(
+ response_data, None
+ )
+
+ def with_gdch_audience(self, audience):
+ """Create a copy of GDCH credentials with the specified audience.
+
+ Args:
+ audience (str): The intended audience for GDCH credentials.
+ """
+ return self.__class__(
+ self._signer,
+ self._service_identity_name,
+ self._project,
+ audience,
+ self._token_uri,
+ self._ca_cert_path,
+ )
+
+ @classmethod
+ def _from_signer_and_info(cls, signer, info):
+ """Creates a Credentials instance from a signer and service account
+ info.
+
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ info (Mapping[str, str]): The service account info.
+
+ Returns:
+ google.oauth2.gdch_credentials.ServiceAccountCredentials: The constructed
+ credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ if info["format_version"] != "1":
+ raise ValueError("Only format version 1 is supported")
+
+ return cls(
+ signer,
+ info["name"], # service_identity_name
+ info["project"],
+ None, # audience
+ info["token_uri"],
+ info.get("ca_cert_path", None),
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info):
+ """Creates a Credentials instance from parsed service account info.
+
+ Args:
+ info (Mapping[str, str]): The service account info in Google
+ format.
+
+ Returns:
+ google.oauth2.gdch_credentials.ServiceAccountCredentials: The constructed
+ credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ signer = _service_account_info.from_dict(
+ info,
+ require=[
+ "format_version",
+ "private_key_id",
+ "private_key",
+ "name",
+ "project",
+ "token_uri",
+ ],
+ use_rsa_signer=False,
+ )
+ return cls._from_signer_and_info(signer, info)
+
+ @classmethod
+ def from_service_account_file(cls, filename):
+ """Creates a Credentials instance from a service account json file.
+
+ Args:
+ filename (str): The path to the service account json file.
+
+ Returns:
+ google.oauth2.gdch_credentials.ServiceAccountCredentials: The constructed
+ credentials.
+ """
+ info, signer = _service_account_info.from_filename(
+ filename,
+ require=[
+ "format_version",
+ "private_key_id",
+ "private_key",
+ "name",
+ "project",
+ "token_uri",
+ ],
+ use_rsa_signer=False,
+ )
+ return cls._from_signer_and_info(signer, info)
diff --git a/Lib/site-packages/google/oauth2/id_token.py b/Lib/site-packages/google/oauth2/id_token.py
new file mode 100644
index 0000000..e5dda50
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/id_token.py
@@ -0,0 +1,340 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google ID Token helpers.
+
+Provides support for verifying `OpenID Connect ID Tokens`_, especially ones
+generated by Google infrastructure.
+
+To parse and verify an ID Token issued by Google's OAuth 2.0 authorization
+server use :func:`verify_oauth2_token`. To verify an ID Token issued by
+Firebase, use :func:`verify_firebase_token`.
+
+A general purpose ID Token verifier is available as :func:`verify_token`.
+
+Example::
+
+ from google.oauth2 import id_token
+ from google.auth.transport import requests
+
+ request = requests.Request()
+
+ id_info = id_token.verify_oauth2_token(
+ token, request, 'my-client-id.example.com')
+
+ userid = id_info['sub']
+
+By default, this will re-fetch certificates for each verification. Because
+Google's public keys are only changed infrequently (on the order of once per
+day), you may wish to take advantage of caching to reduce latency and the
+potential for network errors. This can be accomplished using an external
+library like `CacheControl`_ to create a cache-aware
+:class:`google.auth.transport.Request`::
+
+ import cachecontrol
+ import google.auth.transport.requests
+ import requests
+
+ session = requests.session()
+ cached_session = cachecontrol.CacheControl(session)
+ request = google.auth.transport.requests.Request(session=cached_session)
+
+.. _OpenID Connect ID Tokens:
+ http://openid.net/specs/openid-connect-core-1_0.html#IDToken
+.. _CacheControl: https://cachecontrol.readthedocs.io
+"""
+
+import http.client as http_client
+import json
+import os
+
+from google.auth import environment_vars
+from google.auth import exceptions
+from google.auth import jwt
+
+
+# The URL that provides public certificates for verifying ID tokens issued
+# by Google's OAuth 2.0 authorization server.
+_GOOGLE_OAUTH2_CERTS_URL = "https://www.googleapis.com/oauth2/v1/certs"
+
+# The URL that provides public certificates for verifying ID tokens issued
+# by Firebase and the Google APIs infrastructure
+_GOOGLE_APIS_CERTS_URL = (
+ "https://www.googleapis.com/robot/v1/metadata/x509"
+ "/securetoken@system.gserviceaccount.com"
+)
+
+_GOOGLE_ISSUERS = ["accounts.google.com", "https://accounts.google.com"]
+
+
+def _fetch_certs(request, certs_url):
+ """Fetches certificates.
+
+    Google-style certificate endpoints return JSON in the format of
+ ``{'key id': 'x509 certificate'}``.
+
+ Args:
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ certs_url (str): The certificate endpoint URL.
+
+ Returns:
+ Mapping[str, str]: A mapping of public key ID to x.509 certificate
+ data.
+ """
+ response = request(certs_url, method="GET")
+
+ if response.status != http_client.OK:
+ raise exceptions.TransportError(
+ "Could not fetch certificates at {}".format(certs_url)
+ )
+
+ return json.loads(response.data.decode("utf-8"))
+
+
+def verify_token(
+ id_token,
+ request,
+ audience=None,
+ certs_url=_GOOGLE_OAUTH2_CERTS_URL,
+ clock_skew_in_seconds=0,
+):
+ """Verifies an ID token and returns the decoded token.
+
+ Args:
+ id_token (Union[str, bytes]): The encoded token.
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ audience (str or list): The audience or audiences that this token is
+ intended for. If None then the audience is not verified.
+ certs_url (str): The URL that specifies the certificates to use to
+ verify the token. This URL should return JSON in the format of
+ ``{'key id': 'x509 certificate'}``.
+ clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
+ validation.
+
+ Returns:
+ Mapping[str, Any]: The decoded token.
+ """
+ certs = _fetch_certs(request, certs_url)
+
+ return jwt.decode(
+ id_token,
+ certs=certs,
+ audience=audience,
+ clock_skew_in_seconds=clock_skew_in_seconds,
+ )
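+
+# For example (a sketch; ``token`` and ``request`` are as in the module
+# docstring example):
+#
+#     claims = verify_token(
+#         token, request, audience='my-client-id.example.com',
+#         clock_skew_in_seconds=10)
+#     user_id = claims['sub']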
+
+
+def verify_oauth2_token(id_token, request, audience=None, clock_skew_in_seconds=0):
+ """Verifies an ID Token issued by Google's OAuth 2.0 authorization server.
+
+ Args:
+ id_token (Union[str, bytes]): The encoded token.
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ audience (str): The audience that this token is intended for. This is
+ typically your application's OAuth 2.0 client ID. If None then the
+ audience is not verified.
+ clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
+ validation.
+
+ Returns:
+ Mapping[str, Any]: The decoded token.
+
+ Raises:
+ exceptions.GoogleAuthError: If the issuer is invalid.
+ ValueError: If token verification fails
+ """
+ idinfo = verify_token(
+ id_token,
+ request,
+ audience=audience,
+ certs_url=_GOOGLE_OAUTH2_CERTS_URL,
+ clock_skew_in_seconds=clock_skew_in_seconds,
+ )
+
+ if idinfo["iss"] not in _GOOGLE_ISSUERS:
+ raise exceptions.GoogleAuthError(
+ "Wrong issuer. 'iss' should be one of the following: {}".format(
+ _GOOGLE_ISSUERS
+ )
+ )
+
+ return idinfo
+
+
+def verify_firebase_token(id_token, request, audience=None, clock_skew_in_seconds=0):
+ """Verifies an ID Token issued by Firebase Authentication.
+
+ Args:
+ id_token (Union[str, bytes]): The encoded token.
+ request (google.auth.transport.Request): The object used to make
+ HTTP requests.
+ audience (str): The audience that this token is intended for. This is
+ typically your Firebase application ID. If None then the audience
+ is not verified.
+ clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
+ validation.
+
+ Returns:
+ Mapping[str, Any]: The decoded token.
+ """
+ return verify_token(
+ id_token,
+ request,
+ audience=audience,
+ certs_url=_GOOGLE_APIS_CERTS_URL,
+ clock_skew_in_seconds=clock_skew_in_seconds,
+ )
+
+
+def fetch_id_token_credentials(audience, request=None):
+ """Create the ID Token credentials from the current environment.
+
+    This function acquires an ID token from the environment in the following
+    order. See https://google.aip.dev/auth/4110.
+
+    1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
+       to the path of a valid service account JSON file, then an ID token is
+       acquired using these service account credentials.
+    2. If the application is running in Compute Engine, App Engine or Cloud Run,
+       then the ID token is obtained from the metadata server.
+    3. If the metadata server doesn't exist and no valid service account credentials
+ are found, :class:`~google.auth.exceptions.DefaultCredentialsError` will
+ be raised.
+
+ Example::
+
+ import google.oauth2.id_token
+ import google.auth.transport.requests
+
+ request = google.auth.transport.requests.Request()
+ target_audience = "https://pubsub.googleapis.com"
+
+ # Create ID token credentials.
+ credentials = google.oauth2.id_token.fetch_id_token_credentials(target_audience, request=request)
+
+ # Refresh the credential to obtain an ID token.
+ credentials.refresh(request)
+
+ id_token = credentials.token
+ id_token_expiry = credentials.expiry
+
+ Args:
+ audience (str): The audience that this ID token is intended for.
+ request (Optional[google.auth.transport.Request]): A callable used to make
+ HTTP requests. A request object will be created if not provided.
+
+ Returns:
+ google.auth.credentials.Credentials: The ID token credentials.
+
+ Raises:
+ ~google.auth.exceptions.DefaultCredentialsError:
+            If the metadata server doesn't exist and no valid service account
+ credentials are found.
+ """
+ # 1. Try to get credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
+ # variable.
+ credentials_filename = os.environ.get(environment_vars.CREDENTIALS)
+ if credentials_filename:
+ if not (
+ os.path.exists(credentials_filename)
+ and os.path.isfile(credentials_filename)
+ ):
+ raise exceptions.DefaultCredentialsError(
+ "GOOGLE_APPLICATION_CREDENTIALS path is either not found or invalid."
+ )
+
+ try:
+ with open(credentials_filename, "r") as f:
+ from google.oauth2 import service_account
+
+ info = json.load(f)
+ if info.get("type") == "service_account":
+ return service_account.IDTokenCredentials.from_service_account_info(
+ info, target_audience=audience
+ )
+ except ValueError as caught_exc:
+ new_exc = exceptions.DefaultCredentialsError(
+ "GOOGLE_APPLICATION_CREDENTIALS is not valid service account credentials.",
+ caught_exc,
+ )
+ raise new_exc from caught_exc
+
+    # 2. Try to fetch an ID token from the metadata server if it exists. The
+    #    code works for the GAE and Cloud Run metadata servers as well.
+ try:
+ from google.auth import compute_engine
+ from google.auth.compute_engine import _metadata
+
+ # Create a request object if not provided.
+ if not request:
+ import google.auth.transport.requests
+
+ request = google.auth.transport.requests.Request()
+
+ if _metadata.ping(request):
+ return compute_engine.IDTokenCredentials(
+ request, audience, use_metadata_identity_endpoint=True
+ )
+ except (ImportError, exceptions.TransportError):
+ pass
+
+ raise exceptions.DefaultCredentialsError(
+ "Neither metadata server or valid service account credentials are found."
+ )
+
+
+def fetch_id_token(request, audience):
+ """Fetch the ID Token from the current environment.
+
+    This function acquires an ID token from the environment in the following
+    order. See https://google.aip.dev/auth/4110.
+
+    1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
+       to the path of a valid service account JSON file, then an ID token is
+       acquired using these service account credentials.
+    2. If the application is running in Compute Engine, App Engine or Cloud Run,
+       then the ID token is obtained from the metadata server.
+    3. If the metadata server doesn't exist and no valid service account credentials
+ are found, :class:`~google.auth.exceptions.DefaultCredentialsError` will
+ be raised.
+
+ Example::
+
+ import google.oauth2.id_token
+ import google.auth.transport.requests
+
+ request = google.auth.transport.requests.Request()
+ target_audience = "https://pubsub.googleapis.com"
+
+ id_token = google.oauth2.id_token.fetch_id_token(request, target_audience)
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ audience (str): The audience that this ID token is intended for.
+
+ Returns:
+ str: The ID token.
+
+ Raises:
+ ~google.auth.exceptions.DefaultCredentialsError:
+            If the metadata server doesn't exist and no valid service account
+ credentials are found.
+ """
+ id_token_credentials = fetch_id_token_credentials(audience, request=request)
+ id_token_credentials.refresh(request)
+ return id_token_credentials.token
diff --git a/Lib/site-packages/google/oauth2/reauth.py b/Lib/site-packages/google/oauth2/reauth.py
new file mode 100644
index 0000000..5870347
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/reauth.py
@@ -0,0 +1,368 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A module that provides functions for handling rapt authentication.
+
+Reauth is a process of obtaining additional authentication (such as password,
+security token, etc.) while refreshing OAuth 2.0 credentials for a user.
+
+Credentials that use the Reauth flow must have the reauth scope,
+``https://www.googleapis.com/auth/accounts.reauth``.
+
+This module provides a high-level function for executing the Reauth process,
+:func:`refresh_grant`, and lower-level helpers for doing the individual
+steps of the reauth process.
+
+Those steps are:
+
+1. Obtaining a list of challenges from the reauth server.
+2. Running through each challenge and sending the result back to the reauth
+ server.
+3. Refreshing the access token using the returned rapt token.
+"""
+
+import sys
+
+from google.auth import exceptions
+from google.auth import metrics
+from google.oauth2 import _client
+from google.oauth2 import challenges
+
+
+_REAUTH_SCOPE = "https://www.googleapis.com/auth/accounts.reauth"
+_REAUTH_API = "https://reauth.googleapis.com/v2/sessions"
+
+_REAUTH_NEEDED_ERROR = "invalid_grant"
+_REAUTH_NEEDED_ERROR_INVALID_RAPT = "invalid_rapt"
+_REAUTH_NEEDED_ERROR_RAPT_REQUIRED = "rapt_required"
+
+_AUTHENTICATED = "AUTHENTICATED"
+_CHALLENGE_REQUIRED = "CHALLENGE_REQUIRED"
+_CHALLENGE_PENDING = "CHALLENGE_PENDING"
+
+
+# Override this global variable to set the maximum number of rounds of reauth
+# challenges that should be run.
+RUN_CHALLENGE_RETRY_LIMIT = 5
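+# For example, a caller may set ``reauth.RUN_CHALLENGE_RETRY_LIMIT = 3`` to
+# cap the flow at three challenge rounds (a sketch).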
+
+
+def is_interactive():
+ """Check if we are in an interractive environment.
+
+ Override this function with a different logic if you are using this library
+ outside a CLI.
+
+ If the rapt token needs refreshing, the user needs to answer the challenges.
+    If the user is not in an interactive environment, the challenges cannot
+    be answered and we would just wait for the timeout for no reason.
+
+ Returns:
+ bool: True if is interactive environment, False otherwise.
+ """
+
+ return sys.stdin.isatty()
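+
+# Embedders that answer challenges through another channel can override the
+# check (a sketch):
+#
+#     from google.oauth2 import reauth
+#     reauth.is_interactive = lambda: True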
+
+
+def _get_challenges(
+ request, supported_challenge_types, access_token, requested_scopes=None
+):
+ """Does initial request to reauth API to get the challenges.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ supported_challenge_types (Sequence[str]): list of challenge names
+ supported by the manager.
+ access_token (str): Access token with reauth scopes.
+ requested_scopes (Optional(Sequence[str])): Authorized scopes for the credentials.
+
+ Returns:
+ dict: The response from the reauth API.
+ """
+ body = {"supportedChallengeTypes": supported_challenge_types}
+ if requested_scopes:
+ body["oauthScopesForDomainPolicyLookup"] = requested_scopes
+ metrics_header = {metrics.API_CLIENT_HEADER: metrics.reauth_start()}
+
+ return _client._token_endpoint_request(
+ request,
+ _REAUTH_API + ":start",
+ body,
+ access_token=access_token,
+ use_json=True,
+ headers=metrics_header,
+ )
+
+
+def _send_challenge_result(
+ request, session_id, challenge_id, client_input, access_token
+):
+ """Attempt to refresh access token by sending next challenge result.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ session_id (str): session id returned by the initial reauth call.
+ challenge_id (str): challenge id returned by the initial reauth call.
+ client_input: dict with a challenge-specific client input. For example:
+ ``{'credential': password}`` for password challenge.
+ access_token (str): Access token with reauth scopes.
+
+ Returns:
+ dict: The response from the reauth API.
+ """
+ body = {
+ "sessionId": session_id,
+ "challengeId": challenge_id,
+ "action": "RESPOND",
+ "proposalResponse": client_input,
+ }
+ metrics_header = {metrics.API_CLIENT_HEADER: metrics.reauth_continue()}
+
+ return _client._token_endpoint_request(
+ request,
+ _REAUTH_API + "/{}:continue".format(session_id),
+ body,
+ access_token=access_token,
+ use_json=True,
+ headers=metrics_header,
+ )
+
+
+def _run_next_challenge(msg, request, access_token):
+ """Get the next challenge from msg and run it.
+
+ Args:
+ msg (dict): Reauth API response body (either from the initial request to
+ https://reauth.googleapis.com/v2/sessions:start or from sending the
+ previous challenge response to
+ https://reauth.googleapis.com/v2/sessions/id:continue)
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ access_token (str): reauth access token
+
+ Returns:
+ dict: The response from the reauth API.
+
+ Raises:
+ google.auth.exceptions.ReauthError: if reauth failed.
+ """
+ for challenge in msg["challenges"]:
+ if challenge["status"] != "READY":
+ # Skip non-activated challenges.
+ continue
+ c = challenges.AVAILABLE_CHALLENGES.get(challenge["challengeType"], None)
+ if not c:
+ raise exceptions.ReauthFailError(
+ "Unsupported challenge type {0}. Supported types: {1}".format(
+ challenge["challengeType"],
+ ",".join(list(challenges.AVAILABLE_CHALLENGES.keys())),
+ )
+ )
+ if not c.is_locally_eligible:
+ raise exceptions.ReauthFailError(
+ "Challenge {0} is not locally eligible".format(
+ challenge["challengeType"]
+ )
+ )
+ client_input = c.obtain_challenge_input(challenge)
+ if not client_input:
+ return None
+ return _send_challenge_result(
+ request,
+ msg["sessionId"],
+ challenge["challengeId"],
+ client_input,
+ access_token,
+ )
+ return None
+
+
+def _obtain_rapt(request, access_token, requested_scopes):
+ """Given an http request method and reauth access token, get rapt token.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ access_token (str): reauth access token
+ requested_scopes (Sequence[str]): scopes required by the client application
+
+ Returns:
+ str: The rapt token.
+
+ Raises:
+ google.auth.exceptions.ReauthError: if reauth failed
+ """
+ msg = _get_challenges(
+ request,
+ list(challenges.AVAILABLE_CHALLENGES.keys()),
+ access_token,
+ requested_scopes,
+ )
+
+ if msg["status"] == _AUTHENTICATED:
+ return msg["encodedProofOfReauthToken"]
+
+ for _ in range(0, RUN_CHALLENGE_RETRY_LIMIT):
+ if not (
+ msg["status"] == _CHALLENGE_REQUIRED or msg["status"] == _CHALLENGE_PENDING
+ ):
+ raise exceptions.ReauthFailError(
+ "Reauthentication challenge failed due to API error: {}".format(
+ msg["status"]
+ )
+ )
+
+ if not is_interactive():
+ raise exceptions.ReauthFailError(
+ "Reauthentication challenge could not be answered because you are not"
+ " in an interactive session."
+ )
+
+ msg = _run_next_challenge(msg, request, access_token)
+
+ if not msg:
+ raise exceptions.ReauthFailError("Failed to obtain rapt token.")
+ if msg["status"] == _AUTHENTICATED:
+ return msg["encodedProofOfReauthToken"]
+
+ # If we got here it means we didn't get authenticated.
+ raise exceptions.ReauthFailError("Failed to obtain rapt token.")
+
+
+def get_rapt_token(
+ request, client_id, client_secret, refresh_token, token_uri, scopes=None
+):
+ """Given an http request method and refresh_token, get rapt token.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ client_id (str): client id to get access token for reauth scope.
+ client_secret (str): client secret for the client_id
+ refresh_token (str): refresh token to refresh access token
+ token_uri (str): uri to refresh access token
+ scopes (Optional(Sequence[str])): scopes required by the client application
+
+ Returns:
+ str: The rapt token.
+ Raises:
+ google.auth.exceptions.RefreshError: If reauth failed.
+ """
+ sys.stderr.write("Reauthentication required.\n")
+
+ # Get access token for reauth.
+ access_token, _, _, _ = _client.refresh_grant(
+ request=request,
+ client_id=client_id,
+ client_secret=client_secret,
+ refresh_token=refresh_token,
+ token_uri=token_uri,
+ scopes=[_REAUTH_SCOPE],
+ )
+
+ # Get rapt token from reauth API.
+ rapt_token = _obtain_rapt(request, access_token, requested_scopes=scopes)
+
+ return rapt_token
+
+
+def refresh_grant(
+ request,
+ token_uri,
+ refresh_token,
+ client_id,
+ client_secret,
+ scopes=None,
+ rapt_token=None,
+ enable_reauth_refresh=False,
+):
+ """Implements the reauthentication flow.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+        token_uri (str): The OAuth 2.0 authorization server's token endpoint
+ URI.
+ refresh_token (str): The refresh token to use to get a new access
+ token.
+ client_id (str): The OAuth 2.0 application's client ID.
+        client_secret (str): The OAuth 2.0 application's client secret.
+ scopes (Optional(Sequence[str])): Scopes to request. If present, all
+ scopes must be authorized for the refresh token. Useful if refresh
+ token has a wild card scope (e.g.
+ 'https://www.googleapis.com/auth/any-api').
+ rapt_token (Optional(str)): The rapt token for reauth.
+ enable_reauth_refresh (Optional[bool]): Whether reauth refresh flow
+ should be used. The default value is False. This option is for
+ gcloud only, other users should use the default value.
+
+ Returns:
+ Tuple[str, Optional[str], Optional[datetime], Mapping[str, str], str]: The
+ access token, new refresh token, expiration, the additional data
+ returned by the token endpoint, and the rapt token.
+
+ Raises:
+ google.auth.exceptions.RefreshError: If the token endpoint returned
+ an error.
+ """
+ body = {
+ "grant_type": _client._REFRESH_GRANT_TYPE,
+ "client_id": client_id,
+ "client_secret": client_secret,
+ "refresh_token": refresh_token,
+ }
+ if scopes:
+ body["scope"] = " ".join(scopes)
+ if rapt_token:
+ body["rapt"] = rapt_token
+ metrics_header = {metrics.API_CLIENT_HEADER: metrics.token_request_user()}
+
+ response_status_ok, response_data, retryable_error = _client._token_endpoint_request_no_throw(
+ request, token_uri, body, headers=metrics_header
+ )
+
+ if not response_status_ok and isinstance(response_data, str):
+ raise exceptions.RefreshError(response_data, retryable=False)
+
+ if (
+ not response_status_ok
+ and response_data.get("error") == _REAUTH_NEEDED_ERROR
+ and (
+ response_data.get("error_subtype") == _REAUTH_NEEDED_ERROR_INVALID_RAPT
+ or response_data.get("error_subtype") == _REAUTH_NEEDED_ERROR_RAPT_REQUIRED
+ )
+ ):
+ if not enable_reauth_refresh:
+ raise exceptions.RefreshError(
+ "Reauthentication is needed. Please run `gcloud auth application-default login` to reauthenticate."
+ )
+
+ rapt_token = get_rapt_token(
+ request, client_id, client_secret, refresh_token, token_uri, scopes=scopes
+ )
+ body["rapt"] = rapt_token
+ (
+ response_status_ok,
+ response_data,
+ retryable_error,
+ ) = _client._token_endpoint_request_no_throw(
+ request, token_uri, body, headers=metrics_header
+ )
+
+ if not response_status_ok:
+ _client._handle_error_response(response_data, retryable_error)
+ return _client._handle_refresh_grant_response(response_data, refresh_token) + (
+ rapt_token,
+ )
diff --git a/Lib/site-packages/google/oauth2/service_account.py b/Lib/site-packages/google/oauth2/service_account.py
new file mode 100644
index 0000000..4502c6f
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/service_account.py
@@ -0,0 +1,822 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Service Accounts: JSON Web Token (JWT) Profile for OAuth 2.0
+
+This module implements the JWT Profile for OAuth 2.0 Authorization Grants
+as defined by `RFC 7523`_ with particular support for how this RFC is
+implemented in Google's infrastructure. Google refers to these credentials
+as *Service Accounts*.
+
+Service accounts are used for server-to-server communication, such as
+interactions between a web application server and a Google service. The
+service account belongs to your application instead of to an individual end
+user. In contrast to other OAuth 2.0 profiles, no users are involved and your
+application "acts" as the service account.
+
+Typically an application uses a service account when the application uses
+Google APIs to work with its own data rather than a user's data. For example,
+an application that uses Google Cloud Datastore for data persistence would use
+a service account to authenticate its calls to the Google Cloud Datastore API.
+However, an application that needs to access a user's Drive documents would
+use the normal OAuth 2.0 profile.
+
+Additionally, Google Apps domain administrators can grant service accounts
+`domain-wide delegation`_ authority to access user data on behalf of users in
+the domain.
+
+This profile uses a JWT to acquire an OAuth 2.0 access token. The JWT is used
+in place of the usual authorization token returned during the standard
+OAuth 2.0 Authorization Code grant. The JWT is only used for this purpose, as
+the acquired access token is used as the bearer token when making requests
+using these credentials.
+
+This profile differs from the normal OAuth 2.0 profile because no user consent
+step is required. The use of the private key allows this profile to assert
+identity directly.
+
+This profile also differs from the :mod:`google.auth.jwt` authentication
+because the JWT credentials use the JWT directly as the bearer token. This
+profile instead only uses the JWT to obtain an OAuth 2.0 access token. The
+obtained OAuth 2.0 access token is used as the bearer token.
+
+Domain-wide delegation
+----------------------
+
+Domain-wide delegation allows a service account to access user data on
+behalf of any user in a Google Apps domain without consent from the user.
+For example, an application that uses the Google Calendar API to add events to
+the calendars of all users in a Google Apps domain would use a service account
+to access the Google Calendar API on behalf of users.
+
+The Google Apps administrator must explicitly authorize the service account to
+do this. This authorization step is referred to as "delegating domain-wide
+authority" to a service account.
+
+You can use domain-wide delegation by creating a set of credentials with a
+specific subject using :meth:`~Credentials.with_subject`.
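+
+For example (an illustrative sketch, assuming an existing ``credentials``
+instance)::
+
+    delegated_credentials = credentials.with_subject('user@example.com')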
+
+.. _RFC 7523: https://tools.ietf.org/html/rfc7523
+"""
+
+import copy
+import datetime
+
+from google.auth import _helpers
+from google.auth import _service_account_info
+from google.auth import credentials
+from google.auth import exceptions
+from google.auth import jwt
+from google.auth import metrics
+from google.oauth2 import _client
+
+_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
+_DEFAULT_UNIVERSE_DOMAIN = "googleapis.com"
+_GOOGLE_OAUTH2_TOKEN_ENDPOINT = "https://oauth2.googleapis.com/token"
+
+
+class Credentials(
+ credentials.Signing,
+ credentials.Scoped,
+ credentials.CredentialsWithQuotaProject,
+ credentials.CredentialsWithTokenUri,
+):
+ """Service account credentials
+
+ Usually, you'll create these credentials with one of the helper
+ constructors. To create credentials using a Google service account
+ private key JSON file::
+
+ credentials = service_account.Credentials.from_service_account_file(
+ 'service-account.json')
+
+ Or if you already have the service account file loaded::
+
+ service_account_info = json.load(open('service_account.json'))
+ credentials = service_account.Credentials.from_service_account_info(
+ service_account_info)
+
+ Both helper methods pass on arguments to the constructor, so you can
+ specify additional scopes and a subject if necessary::
+
+ credentials = service_account.Credentials.from_service_account_file(
+ 'service-account.json',
+ scopes=['email'],
+ subject='user@example.com')
+
+ The credentials are considered immutable. If you want to modify the scopes
+ or the subject used for delegation, use :meth:`with_scopes` or
+ :meth:`with_subject`::
+
+ scoped_credentials = credentials.with_scopes(['email'])
+ delegated_credentials = credentials.with_subject(subject)
+
+ To add a quota project, use :meth:`with_quota_project`::
+
+ credentials = credentials.with_quota_project('myproject-123')
+ """
+
+ def __init__(
+ self,
+ signer,
+ service_account_email,
+ token_uri,
+ scopes=None,
+ default_scopes=None,
+ subject=None,
+ project_id=None,
+ quota_project_id=None,
+ additional_claims=None,
+ always_use_jwt_access=False,
+ universe_domain=_DEFAULT_UNIVERSE_DOMAIN,
+ trust_boundary=None,
+ ):
+ """
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ service_account_email (str): The service account's email.
+ scopes (Sequence[str]): User-defined scopes to request during the
+ authorization grant.
+ default_scopes (Sequence[str]): Default scopes passed by a
+ Google client library. Use 'scopes' for user-defined scopes.
+ token_uri (str): The OAuth 2.0 Token URI.
+            subject (str): For domain-wide delegation, the email address of the
+                user for which to request delegated access.
+ project_id (str): Project ID associated with the service account
+ credential.
+ quota_project_id (Optional[str]): The project ID used for quota and
+ billing.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT assertion used in the authorization grant.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                always be used.
+            universe_domain (str): The universe domain. The default
+                universe domain is googleapis.com. For a non-default value,
+                self-signed JWT is always used for token refresh.
+ trust_boundary (str): String representation of trust boundary meta.
+
+ .. note:: Typically one of the helper constructors
+ :meth:`from_service_account_file` or
+ :meth:`from_service_account_info` are used instead of calling the
+ constructor directly.
+ """
+ super(Credentials, self).__init__()
+
+ self._scopes = scopes
+ self._default_scopes = default_scopes
+ self._signer = signer
+ self._service_account_email = service_account_email
+ self._subject = subject
+ self._project_id = project_id
+ self._quota_project_id = quota_project_id
+ self._token_uri = token_uri
+ self._always_use_jwt_access = always_use_jwt_access
+ self._universe_domain = universe_domain or _DEFAULT_UNIVERSE_DOMAIN
+
+ if universe_domain != _DEFAULT_UNIVERSE_DOMAIN:
+ self._always_use_jwt_access = True
+
+ self._jwt_credentials = None
+
+ if additional_claims is not None:
+ self._additional_claims = additional_claims
+ else:
+ self._additional_claims = {}
+ self._trust_boundary = {"locations": [], "encoded_locations": "0x0"}
+
+ @classmethod
+ def _from_signer_and_info(cls, signer, info, **kwargs):
+ """Creates a Credentials instance from a signer and service account
+ info.
+
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ info (Mapping[str, str]): The service account info.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.Credentials: The constructed credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ return cls(
+ signer,
+ service_account_email=info["client_email"],
+ token_uri=info["token_uri"],
+ project_id=info.get("project_id"),
+ universe_domain=info.get("universe_domain", _DEFAULT_UNIVERSE_DOMAIN),
+ trust_boundary=info.get("trust_boundary"),
+ **kwargs
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info, **kwargs):
+ """Creates a Credentials instance from parsed service account info.
+
+ Args:
+ info (Mapping[str, str]): The service account info in Google
+ format.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.service_account.Credentials: The constructed
+ credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ signer = _service_account_info.from_dict(
+ info, require=["client_email", "token_uri"]
+ )
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename, **kwargs):
+ """Creates a Credentials instance from a service account json file.
+
+ Args:
+ filename (str): The path to the service account json file.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.service_account.Credentials: The constructed
+ credentials.
+ """
+ info, signer = _service_account_info.from_filename(
+ filename, require=["client_email", "token_uri"]
+ )
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @property
+ def service_account_email(self):
+ """The service account email."""
+ return self._service_account_email
+
+ @property
+ def project_id(self):
+ """Project ID associated with this credential."""
+ return self._project_id
+
+ @property
+ def requires_scopes(self):
+ """Checks if the credentials requires scopes.
+
+ Returns:
+ bool: True if there are no scopes set otherwise False.
+ """
+ return True if not self._scopes else False
+
+ def _make_copy(self):
+ cred = self.__class__(
+ self._signer,
+ service_account_email=self._service_account_email,
+ scopes=copy.copy(self._scopes),
+ default_scopes=copy.copy(self._default_scopes),
+ token_uri=self._token_uri,
+ subject=self._subject,
+ project_id=self._project_id,
+ quota_project_id=self._quota_project_id,
+ additional_claims=self._additional_claims.copy(),
+ always_use_jwt_access=self._always_use_jwt_access,
+ universe_domain=self._universe_domain,
+ )
+ return cred
+
+ @_helpers.copy_docstring(credentials.Scoped)
+ def with_scopes(self, scopes, default_scopes=None):
+ cred = self._make_copy()
+ cred._scopes = scopes
+ cred._default_scopes = default_scopes
+ return cred
+
+ def with_always_use_jwt_access(self, always_use_jwt_access):
+ """Create a copy of these credentials with the specified always_use_jwt_access value.
+
+ Args:
+            always_use_jwt_access (bool): Whether to always use self-signed JWT.
+
+ Returns:
+ google.auth.service_account.Credentials: A new credentials
+ instance.
+
+        Raises:
+ google.auth.exceptions.InvalidValue: If the universe domain is not
+ default and always_use_jwt_access is False.
+ """
+ cred = self._make_copy()
+ if (
+ cred._universe_domain != _DEFAULT_UNIVERSE_DOMAIN
+ and not always_use_jwt_access
+ ):
+ raise exceptions.InvalidValue(
+ "always_use_jwt_access should be True for non-default universe domain"
+ )
+ cred._always_use_jwt_access = always_use_jwt_access
+ return cred
+
+ @_helpers.copy_docstring(credentials.CredentialsWithUniverseDomain)
+ def with_universe_domain(self, universe_domain):
+ cred = self._make_copy()
+ cred._universe_domain = universe_domain
+ if universe_domain != _DEFAULT_UNIVERSE_DOMAIN:
+ cred._always_use_jwt_access = True
+ return cred
+
+ def with_subject(self, subject):
+ """Create a copy of these credentials with the specified subject.
+
+ Args:
+ subject (str): The subject claim.
+
+ Returns:
+ google.auth.service_account.Credentials: A new credentials
+ instance.
+ """
+ cred = self._make_copy()
+ cred._subject = subject
+ return cred
+
+ def with_claims(self, additional_claims):
+ """Returns a copy of these credentials with modified claims.
+
+ Args:
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT payload. This will be merged with the current
+ additional claims.
+
+ Returns:
+ google.auth.service_account.Credentials: A new credentials
+ instance.
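+
+        Example (illustrative)::
+
+            claims_credentials = credentials.with_claims(
+                {'custom_claim': 'custom_value'})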
+ """
+ new_additional_claims = copy.deepcopy(self._additional_claims)
+ new_additional_claims.update(additional_claims or {})
+ cred = self._make_copy()
+ cred._additional_claims = new_additional_claims
+ return cred
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ cred = self._make_copy()
+ cred._quota_project_id = quota_project_id
+ return cred
+
+ @_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
+ def with_token_uri(self, token_uri):
+ cred = self._make_copy()
+ cred._token_uri = token_uri
+ return cred
+
+ def _make_authorization_grant_assertion(self):
+ """Create the OAuth 2.0 assertion.
+
+ This assertion is used during the OAuth 2.0 grant to acquire an
+ access token.
+
+ Returns:
+ bytes: The authorization grant assertion.
+ """
+ now = _helpers.utcnow()
+ lifetime = datetime.timedelta(seconds=_DEFAULT_TOKEN_LIFETIME_SECS)
+ expiry = now + lifetime
+
+ payload = {
+ "iat": _helpers.datetime_to_secs(now),
+ "exp": _helpers.datetime_to_secs(expiry),
+ # The issuer must be the service account email.
+ "iss": self._service_account_email,
+ # The audience must be the auth token endpoint's URI
+ "aud": _GOOGLE_OAUTH2_TOKEN_ENDPOINT,
+ "scope": _helpers.scopes_to_string(self._scopes or ()),
+ }
+
+ payload.update(self._additional_claims)
+
+ # The subject can be a user email for domain-wide delegation.
+ if self._subject:
+ payload.setdefault("sub", self._subject)
+
+ token = jwt.encode(self._signer, payload)
+
+ return token
+
+ def _use_self_signed_jwt(self):
+        # Domain-wide delegation doesn't work with self-signed JWT, so if a
+        # subject exists we should not use self-signed JWT.
+ return self._subject is None and self._jwt_credentials is not None
+
+ def _metric_header_for_usage(self):
+ if self._use_self_signed_jwt():
+ return metrics.CRED_TYPE_SA_JWT
+ return metrics.CRED_TYPE_SA_ASSERTION
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ if self._always_use_jwt_access and not self._jwt_credentials:
+            # If self-signed JWT should be used but the JWT credential has not
+            # been created yet, try to create one with scopes.
+ self._create_self_signed_jwt(None)
+
+ if self._universe_domain != _DEFAULT_UNIVERSE_DOMAIN and self._subject:
+ raise exceptions.RefreshError(
+ "domain wide delegation is not supported for non-default universe domain"
+ )
+
+ if self._use_self_signed_jwt():
+ self._jwt_credentials.refresh(request)
+ self.token = self._jwt_credentials.token.decode()
+ self.expiry = self._jwt_credentials.expiry
+ else:
+ assertion = self._make_authorization_grant_assertion()
+ access_token, expiry, _ = _client.jwt_grant(
+ request, self._token_uri, assertion
+ )
+ self.token = access_token
+ self.expiry = expiry
+
+ def _create_self_signed_jwt(self, audience):
+ """Create a self-signed JWT from the credentials if requirements are met.
+
+ Args:
+ audience (str): The service URL. ``https://[API_ENDPOINT]/``
+ """
+ # https://google.aip.dev/auth/4111
+ if self._always_use_jwt_access:
+ if self._scopes:
+ additional_claims = {"scope": " ".join(self._scopes)}
+ if (
+ self._jwt_credentials is None
+ or self._jwt_credentials.additional_claims != additional_claims
+ ):
+ self._jwt_credentials = jwt.Credentials.from_signing_credentials(
+ self, None, additional_claims=additional_claims
+ )
+ elif audience:
+ if (
+ self._jwt_credentials is None
+ or self._jwt_credentials._audience != audience
+ ):
+ self._jwt_credentials = jwt.Credentials.from_signing_credentials(
+ self, audience
+ )
+ elif self._default_scopes:
+ additional_claims = {"scope": " ".join(self._default_scopes)}
+ if (
+ self._jwt_credentials is None
+ or additional_claims != self._jwt_credentials.additional_claims
+ ):
+ self._jwt_credentials = jwt.Credentials.from_signing_credentials(
+ self, None, additional_claims=additional_claims
+ )
+ elif not self._scopes and audience:
+ self._jwt_credentials = jwt.Credentials.from_signing_credentials(
+ self, audience
+ )
+
+ @_helpers.copy_docstring(credentials.Signing)
+ def sign_bytes(self, message):
+ return self._signer.sign(message)
+
+ @property # type: ignore
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer(self):
+ return self._signer
+
+ @property # type: ignore
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer_email(self):
+ return self._service_account_email
+
+
+class IDTokenCredentials(
+ credentials.Signing,
+ credentials.CredentialsWithQuotaProject,
+ credentials.CredentialsWithTokenUri,
+):
+ """Open ID Connect ID Token-based service account credentials.
+
+ These credentials are largely similar to :class:`.Credentials`, but instead
+ of using an OAuth 2.0 Access Token as the bearer token, they use an Open
+ ID Connect ID Token as the bearer token. These credentials are useful when
+ communicating to services that require ID Tokens and can not accept access
+ tokens.
+
+ Usually, you'll create these credentials with one of the helper
+ constructors. To create credentials using a Google service account
+ private key JSON file::
+
+ credentials = (
+ service_account.IDTokenCredentials.from_service_account_file(
+ 'service-account.json'))
+
+
+ Or if you already have the service account file loaded::
+
+ service_account_info = json.load(open('service_account.json'))
+ credentials = (
+ service_account.IDTokenCredentials.from_service_account_info(
+ service_account_info))
+
+
+    Both helper methods pass on arguments to the constructor, so you can
+    specify the target audience and other arguments if necessary::
+
+        credentials = (
+            service_account.IDTokenCredentials.from_service_account_file(
+                'service-account.json',
+                target_audience='https://service.example.com'))
+
+
+    The credentials are considered immutable. If you want to modify the
+    target audience, use :meth:`with_target_audience`::
+
+        target_credentials = credentials.with_target_audience(new_audience)
+
+ """
+
+ def __init__(
+ self,
+ signer,
+ service_account_email,
+ token_uri,
+ target_audience,
+ additional_claims=None,
+ quota_project_id=None,
+ universe_domain=_DEFAULT_UNIVERSE_DOMAIN,
+ ):
+ """
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ service_account_email (str): The service account's email.
+ token_uri (str): The OAuth 2.0 Token URI.
+ target_audience (str): The intended audience for these credentials,
+ used when requesting the ID Token. The ID Token's ``aud`` claim
+ will be set to this string.
+ additional_claims (Mapping[str, str]): Any additional claims for
+ the JWT assertion used in the authorization grant.
+ quota_project_id (Optional[str]): The project ID used for quota and billing.
+            universe_domain (str): The universe domain. The default
+                universe domain is googleapis.com. For a non-default value, the
+                IAM ID token endpoint is used for token refresh. Note that the
+                iam.serviceAccountTokenCreator role is required to use the IAM
+                endpoint.
+
+ .. note:: Typically one of the helper constructors
+ :meth:`from_service_account_file` or
+ :meth:`from_service_account_info` are used instead of calling the
+ constructor directly.
+ """
+ super(IDTokenCredentials, self).__init__()
+ self._signer = signer
+ self._service_account_email = service_account_email
+ self._token_uri = token_uri
+ self._target_audience = target_audience
+ self._quota_project_id = quota_project_id
+ self._use_iam_endpoint = False
+
+ if not universe_domain:
+ self._universe_domain = _DEFAULT_UNIVERSE_DOMAIN
+ else:
+ self._universe_domain = universe_domain
+
+ if universe_domain != _DEFAULT_UNIVERSE_DOMAIN:
+ self._use_iam_endpoint = True
+
+ if additional_claims is not None:
+ self._additional_claims = additional_claims
+ else:
+ self._additional_claims = {}
+
+ @classmethod
+ def _from_signer_and_info(cls, signer, info, **kwargs):
+ """Creates a credentials instance from a signer and service account
+ info.
+
+ Args:
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
+ info (Mapping[str, str]): The service account info.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.jwt.IDTokenCredentials: The constructed credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ kwargs.setdefault("service_account_email", info["client_email"])
+ kwargs.setdefault("token_uri", info["token_uri"])
+ if "universe_domain" in info:
+ kwargs["universe_domain"] = info["universe_domain"]
+ return cls(signer, **kwargs)
+
+ @classmethod
+ def from_service_account_info(cls, info, **kwargs):
+ """Creates a credentials instance from parsed service account info.
+
+ Args:
+ info (Mapping[str, str]): The service account info in Google
+ format.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.service_account.IDTokenCredentials: The constructed
+ credentials.
+
+ Raises:
+ ValueError: If the info is not in the expected format.
+ """
+ signer = _service_account_info.from_dict(
+ info, require=["client_email", "token_uri"]
+ )
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename, **kwargs):
+ """Creates a credentials instance from a service account json file.
+
+ Args:
+ filename (str): The path to the service account json file.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ google.auth.service_account.IDTokenCredentials: The constructed
+ credentials.
+ """
+ info, signer = _service_account_info.from_filename(
+ filename, require=["client_email", "token_uri"]
+ )
+ return cls._from_signer_and_info(signer, info, **kwargs)
+
+ def _make_copy(self):
+ cred = self.__class__(
+ self._signer,
+ service_account_email=self._service_account_email,
+ token_uri=self._token_uri,
+ target_audience=self._target_audience,
+ additional_claims=self._additional_claims.copy(),
+ quota_project_id=self.quota_project_id,
+ universe_domain=self._universe_domain,
+ )
+ # _use_iam_endpoint is not exposed in the constructor
+ cred._use_iam_endpoint = self._use_iam_endpoint
+ return cred
+
+ def with_target_audience(self, target_audience):
+ """Create a copy of these credentials with the specified target
+ audience.
+
+ Args:
+ target_audience (str): The intended audience for these credentials,
+ used when requesting the ID Token.
+
+ Returns:
+ google.auth.service_account.IDTokenCredentials: A new credentials
+ instance.
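+
+        Example (illustrative; the audience is a placeholder)::
+
+            id_credentials = credentials.with_target_audience(
+                'https://service.example.com')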
+ """
+ cred = self._make_copy()
+ cred._target_audience = target_audience
+ return cred
+
+ def _with_use_iam_endpoint(self, use_iam_endpoint):
+ """Create a copy of these credentials with the use_iam_endpoint value.
+
+ Args:
+ use_iam_endpoint (bool): If True, IAM generateIdToken endpoint will
+ be used instead of the token_uri. Note that
+ iam.serviceAccountTokenCreator role is required to use the IAM
+ endpoint. The default value is False. This feature is currently
+ experimental and subject to change without notice.
+
+ Returns:
+ google.auth.service_account.IDTokenCredentials: A new credentials
+ instance.
+
+        Raises:
+ google.auth.exceptions.InvalidValue: If the universe domain is not
+ default and use_iam_endpoint is False.
+ """
+ cred = self._make_copy()
+ if cred._universe_domain != _DEFAULT_UNIVERSE_DOMAIN and not use_iam_endpoint:
+ raise exceptions.InvalidValue(
+ "use_iam_endpoint should be True for non-default universe domain"
+ )
+ cred._use_iam_endpoint = use_iam_endpoint
+ return cred
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ cred = self._make_copy()
+ cred._quota_project_id = quota_project_id
+ return cred
+
+ @_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
+ def with_token_uri(self, token_uri):
+ cred = self._make_copy()
+ cred._token_uri = token_uri
+ return cred
+
+ def _make_authorization_grant_assertion(self):
+ """Create the OAuth 2.0 assertion.
+
+ This assertion is used during the OAuth 2.0 grant to acquire an
+ ID token.
+
+ Returns:
+ bytes: The authorization grant assertion.
+ """
+ now = _helpers.utcnow()
+ lifetime = datetime.timedelta(seconds=_DEFAULT_TOKEN_LIFETIME_SECS)
+ expiry = now + lifetime
+
+ payload = {
+ "iat": _helpers.datetime_to_secs(now),
+ "exp": _helpers.datetime_to_secs(expiry),
+ # The issuer must be the service account email.
+ "iss": self.service_account_email,
+ # The audience must be the auth token endpoint's URI
+ "aud": _GOOGLE_OAUTH2_TOKEN_ENDPOINT,
+ # The target audience specifies which service the ID token is
+ # intended for.
+ "target_audience": self._target_audience,
+ }
+
+ payload.update(self._additional_claims)
+
+ token = jwt.encode(self._signer, payload)
+
+ return token
+
+ def _refresh_with_iam_endpoint(self, request):
+ """Use IAM generateIdToken endpoint to obtain an ID token.
+
+ It works as follows:
+
+ 1. First we create a self signed jwt with
+ https://www.googleapis.com/auth/iam being the scope.
+
+ 2. Next we use the self signed jwt as the access token, and make a POST
+ request to IAM generateIdToken endpoint. The request body is:
+ {
+ "audience": self._target_audience,
+ "includeEmail": "true",
+ "useEmailAzp": "true",
+ }
+
+        If the request is successful, it will return {"token":"the ID token"},
+ and we can extract the ID token and compute its expiry.
+ """
+ jwt_credentials = jwt.Credentials.from_signing_credentials(
+ self,
+ None,
+ additional_claims={"scope": "https://www.googleapis.com/auth/iam"},
+ )
+ jwt_credentials.refresh(request)
+ self.token, self.expiry = _client.call_iam_generate_id_token_endpoint(
+ request,
+ self.signer_email,
+ self._target_audience,
+ jwt_credentials.token.decode(),
+ )
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def refresh(self, request):
+ if self._use_iam_endpoint:
+ self._refresh_with_iam_endpoint(request)
+ else:
+ assertion = self._make_authorization_grant_assertion()
+ access_token, expiry, _ = _client.id_token_jwt_grant(
+ request, self._token_uri, assertion
+ )
+ self.token = access_token
+ self.expiry = expiry
+
+ @property
+ def service_account_email(self):
+ """The service account email."""
+ return self._service_account_email
+
+ @_helpers.copy_docstring(credentials.Signing)
+ def sign_bytes(self, message):
+ return self._signer.sign(message)
+
+ @property # type: ignore
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer(self):
+ return self._signer
+
+ @property # type: ignore
+ @_helpers.copy_docstring(credentials.Signing)
+ def signer_email(self):
+ return self._service_account_email
diff --git a/Lib/site-packages/google/oauth2/sts.py b/Lib/site-packages/google/oauth2/sts.py
new file mode 100644
index 0000000..ad39627
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/sts.py
@@ -0,0 +1,176 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""OAuth 2.0 Token Exchange Spec.
+
+This module defines a token exchange utility based on the `OAuth 2.0 Token
+Exchange`_ spec. This will be mainly used to exchange external credentials
+for GCP access tokens in workload identity pools to access Google APIs.
+
+The implementation will support various types of client authentication as
+allowed in the spec.
+
+A deviation from the spec is the support for additional Google-specific options
+that cannot be easily mapped to parameters defined in the RFC.
+
+The returned dictionary response will be based on the `rfc8693 section 2.2.1`_
+spec JSON response.
+
+.. _OAuth 2.0 Token Exchange: https://tools.ietf.org/html/rfc8693
+.. _rfc8693 section 2.2.1: https://tools.ietf.org/html/rfc8693#section-2.2.1
+"""
+
+import http.client as http_client
+import json
+import urllib
+
+from google.oauth2 import utils
+
+
+_URLENCODED_HEADERS = {"Content-Type": "application/x-www-form-urlencoded"}
+
+
+class Client(utils.OAuthClientAuthHandler):
+ """Implements the OAuth 2.0 token exchange spec based on
+ https://tools.ietf.org/html/rfc8693.
+ """
+
+ def __init__(self, token_exchange_endpoint, client_authentication=None):
+ """Initializes an STS client instance.
+
+ Args:
+ token_exchange_endpoint (str): The token exchange endpoint.
+            client_authentication (Optional[google.oauth2.utils.ClientAuthentication]):
+ The optional OAuth client authentication credentials if available.
+ """
+ super(Client, self).__init__(client_authentication)
+ self._token_exchange_endpoint = token_exchange_endpoint
+
+ def _make_request(self, request, headers, request_body):
+ # Initialize request headers.
+ request_headers = _URLENCODED_HEADERS.copy()
+
+ # Inject additional headers.
+ if headers:
+ for k, v in dict(headers).items():
+ request_headers[k] = v
+
+ # Apply OAuth client authentication.
+ self.apply_client_authentication_options(request_headers, request_body)
+
+ # Execute request.
+ response = request(
+ url=self._token_exchange_endpoint,
+ method="POST",
+ headers=request_headers,
+ body=urllib.parse.urlencode(request_body).encode("utf-8"),
+ )
+
+ response_body = (
+ response.data.decode("utf-8")
+ if hasattr(response.data, "decode")
+ else response.data
+ )
+
+ # If non-200 response received, translate to OAuthError exception.
+ if response.status != http_client.OK:
+ utils.handle_error_response(response_body)
+
+ response_data = json.loads(response_body)
+
+ # Return successful response.
+ return response_data
+
+ def exchange_token(
+ self,
+ request,
+ grant_type,
+ subject_token,
+ subject_token_type,
+ resource=None,
+ audience=None,
+ scopes=None,
+ requested_token_type=None,
+ actor_token=None,
+ actor_token_type=None,
+ additional_options=None,
+ additional_headers=None,
+ ):
+ """Exchanges the provided token for another type of token based on the
+ rfc8693 spec.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+ grant_type (str): The OAuth 2.0 token exchange grant type.
+ subject_token (str): The OAuth 2.0 token exchange subject token.
+ subject_token_type (str): The OAuth 2.0 token exchange subject token type.
+ resource (Optional[str]): The optional OAuth 2.0 token exchange resource field.
+ audience (Optional[str]): The optional OAuth 2.0 token exchange audience field.
+ scopes (Optional[Sequence[str]]): The optional list of scopes to use.
+ requested_token_type (Optional[str]): The optional OAuth 2.0 token exchange requested
+ token type.
+ actor_token (Optional[str]): The optional OAuth 2.0 token exchange actor token.
+ actor_token_type (Optional[str]): The optional OAuth 2.0 token exchange actor token type.
+ additional_options (Optional[Mapping[str, str]]): The optional additional
+ non-standard Google specific options.
+ additional_headers (Optional[Mapping[str, str]]): The optional additional
+ headers to pass to the token exchange endpoint.
+
+ Returns:
+ Mapping[str, str]: The token exchange JSON-decoded response data containing
+ the requested token and its expiration time.
+
+ Raises:
+ google.auth.exceptions.OAuthError: If the token endpoint returned
+ an error.
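+
+        Example (an illustrative sketch; ``request``, ``external_token`` and
+        ``audience`` are assumed to exist, and the endpoint is a placeholder)::
+
+            client = Client('https://sts.googleapis.com/v1/token')
+            response = client.exchange_token(
+                request,
+                grant_type='urn:ietf:params:oauth:grant-type:token-exchange',
+                subject_token=external_token,
+                subject_token_type='urn:ietf:params:oauth:token-type:jwt',
+                audience=audience,
+                requested_token_type='urn:ietf:params:oauth:token-type:access_token',
+                scopes=['https://www.googleapis.com/auth/cloud-platform'],
+            )
+            access_token = response['access_token']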
+ """
+ # Initialize request body.
+ request_body = {
+ "grant_type": grant_type,
+ "resource": resource,
+ "audience": audience,
+ "scope": " ".join(scopes or []),
+ "requested_token_type": requested_token_type,
+ "subject_token": subject_token,
+ "subject_token_type": subject_token_type,
+ "actor_token": actor_token,
+ "actor_token_type": actor_token_type,
+ "options": None,
+ }
+ # Add additional non-standard options.
+ if additional_options:
+ request_body["options"] = urllib.parse.quote(json.dumps(additional_options))
+ # Remove empty fields in request body.
+ for k, v in dict(request_body).items():
+ if v is None or v == "":
+ del request_body[k]
+
+ return self._make_request(request, additional_headers, request_body)
+
+ def refresh_token(self, request, refresh_token):
+ """Exchanges a refresh token for an access token based on the
+ RFC6749 spec.
+
+ Args:
+ request (google.auth.transport.Request): A callable used to make
+ HTTP requests.
+            refresh_token (str): The OAuth 2.0 refresh token.
+ """
+
+ return self._make_request(
+ request,
+ None,
+ {"grant_type": "refresh_token", "refresh_token": refresh_token},
+ )
diff --git a/Lib/site-packages/google/oauth2/utils.py b/Lib/site-packages/google/oauth2/utils.py
new file mode 100644
index 0000000..d72ff19
--- /dev/null
+++ b/Lib/site-packages/google/oauth2/utils.py
@@ -0,0 +1,168 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""OAuth 2.0 Utilities.
+
+This module provides implementations for various OAuth 2.0 utilities.
+This includes `OAuth error handling`_ and
+`Client authentication for OAuth flows`_.
+
+OAuth error handling
+--------------------
+This will define interfaces for handling OAuth related error responses as
+stated in `RFC 6749 section 5.2`_.
+This will include a common function to convert these HTTP error responses to a
+:class:`google.auth.exceptions.OAuthError` exception.
+
+
+Client authentication for OAuth flows
+-------------------------------------
+We introduce an interface for defining client authentication credentials based
+on `RFC 6749 section 2.3.1`_. This will expose the following
+capabilities:
+
+ * Ability to support basic authentication via request header.
+ * Ability to support bearer token authentication via request header.
+ * Ability to support client ID / secret authentication via request body.
+
+.. _RFC 6749 section 2.3.1: https://tools.ietf.org/html/rfc6749#section-2.3.1
+.. _RFC 6749 section 5.2: https://tools.ietf.org/html/rfc6749#section-5.2
+"""
+
+import abc
+import base64
+import enum
+import json
+
+from google.auth import exceptions
+
+
+# OAuth client authentication based on
+# https://tools.ietf.org/html/rfc6749#section-2.3.
+class ClientAuthType(enum.Enum):
+ basic = 1
+ request_body = 2
+
+
+class ClientAuthentication(object):
+ """Defines the client authentication credentials for basic and request-body
+ types based on https://tools.ietf.org/html/rfc6749#section-2.3.1.
+ """
+
+ def __init__(self, client_auth_type, client_id, client_secret=None):
+ """Instantiates a client authentication object containing the client ID
+        and secret credentials for basic and request-body auth.
+
+ Args:
+            client_auth_type (google.oauth2.utils.ClientAuthType): The
+ client authentication type.
+ client_id (str): The client ID.
+ client_secret (Optional[str]): The client secret.
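+
+        Example (illustrative)::
+
+            client_auth = ClientAuthentication(
+                ClientAuthType.basic, 'client-id', 'client-secret')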
+ """
+ self.client_auth_type = client_auth_type
+ self.client_id = client_id
+ self.client_secret = client_secret
+
+
+class OAuthClientAuthHandler(metaclass=abc.ABCMeta):
+ """Abstract class for handling client authentication in OAuth-based
+ operations.
+ """
+
+ def __init__(self, client_authentication=None):
+ """Instantiates an OAuth client authentication handler.
+
+ Args:
+ client_authentication (Optional[google.oauth2.utils.ClientAuthentication]):
+ The OAuth client authentication credentials if available.
+ """
+ super(OAuthClientAuthHandler, self).__init__()
+ self._client_authentication = client_authentication
+
+ def apply_client_authentication_options(
+ self, headers, request_body=None, bearer_token=None
+ ):
+ """Applies client authentication on the OAuth request's headers or POST
+ body.
+
+ Args:
+ headers (Mapping[str, str]): The HTTP request header.
+ request_body (Optional[Mapping[str, str]]): The HTTP request body
+ dictionary. For requests that do not support request body, this
+ is None and will be ignored.
+ bearer_token (Optional[str]): The optional bearer token.
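+
+        Example (an illustrative sketch using basic auth)::
+
+            handler = OAuthClientAuthHandler(
+                ClientAuthentication(ClientAuthType.basic, 'id', 'secret'))
+            headers = {}
+            handler.apply_client_authentication_options(headers)
+            # headers now contains a Basic Authorization header.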
+ """
+ # Inject authenticated header.
+ self._inject_authenticated_headers(headers, bearer_token)
+ # Inject authenticated request body.
+ if bearer_token is None:
+ self._inject_authenticated_request_body(request_body)
+
+ def _inject_authenticated_headers(self, headers, bearer_token=None):
+ if bearer_token is not None:
+ headers["Authorization"] = "Bearer %s" % bearer_token
+ elif (
+ self._client_authentication is not None
+ and self._client_authentication.client_auth_type is ClientAuthType.basic
+ ):
+ username = self._client_authentication.client_id
+ password = self._client_authentication.client_secret or ""
+
+ credentials = base64.b64encode(
+ ("%s:%s" % (username, password)).encode()
+ ).decode()
+ headers["Authorization"] = "Basic %s" % credentials
+
+ def _inject_authenticated_request_body(self, request_body):
+ if (
+ self._client_authentication is not None
+ and self._client_authentication.client_auth_type
+ is ClientAuthType.request_body
+ ):
+ if request_body is None:
+ raise exceptions.OAuthError(
+ "HTTP request does not support request-body"
+ )
+ else:
+ request_body["client_id"] = self._client_authentication.client_id
+ request_body["client_secret"] = (
+ self._client_authentication.client_secret or ""
+ )
+
+
+def handle_error_response(response_body):
+ """Translates an error response from an OAuth operation into an
+ OAuthError exception.
+
+ Args:
+ response_body (str): The decoded response data.
+
+ Raises:
+ google.auth.exceptions.OAuthError
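+
+    Example (illustrative)::
+
+        handle_error_response(
+            '{"error": "invalid_grant", "error_description": "Bad token"}')
+        # Raises OAuthError('Error code invalid_grant: Bad token', ...)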
+ """
+ try:
+ error_components = []
+ error_data = json.loads(response_body)
+
+ error_components.append("Error code {}".format(error_data["error"]))
+ if "error_description" in error_data:
+ error_components.append(": {}".format(error_data["error_description"]))
+ if "error_uri" in error_data:
+ error_components.append(" - {}".format(error_data["error_uri"]))
+ error_details = "".join(error_components)
+ # If no details could be extracted, use the response data.
+ except (KeyError, ValueError):
+ error_details = response_body
+
+ raise exceptions.OAuthError(error_details, response_body)
diff --git a/Lib/site-packages/google/protobuf/__init__.py b/Lib/site-packages/google/protobuf/__init__.py
new file mode 100644
index 0000000..2f627e3
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/__init__.py
@@ -0,0 +1,10 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# Copyright 2007 Google Inc. All Rights Reserved.
+
+__version__ = '4.25.2'
diff --git a/Lib/site-packages/google/protobuf/any_pb2.py b/Lib/site-packages/google/protobuf/any_pb2.py
new file mode 100644
index 0000000..1082af1
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/any_pb2.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/any.proto
+# Protobuf Python Version: 4.25.2
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"6\n\x03\x41ny\x12\x19\n\x08type_url\x18\x01 \x01(\tR\x07typeUrl\x12\x14\n\x05value\x18\x02 \x01(\x0cR\x05valueBv\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _globals['DESCRIPTOR']._options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _globals['_ANY']._serialized_start=46
+ _globals['_ANY']._serialized_end=100
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/protobuf/api_pb2.py b/Lib/site-packages/google/protobuf/api_pb2.py
new file mode 100644
index 0000000..2dd3d9c
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/api_pb2.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/api.proto
+# Protobuf Python Version: 4.25.2
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
+from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\xc1\x02\n\x03\x41pi\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x31\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.MethodR\x07methods\x12\x31\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.OptionR\x07options\x12\x18\n\x07version\x18\x04 \x01(\tR\x07version\x12\x45\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContextR\rsourceContext\x12.\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.MixinR\x06mixins\x12/\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.SyntaxR\x06syntax\"\xb2\x02\n\x06Method\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12(\n\x10request_type_url\x18\x02 \x01(\tR\x0erequestTypeUrl\x12+\n\x11request_streaming\x18\x03 \x01(\x08R\x10requestStreaming\x12*\n\x11response_type_url\x18\x04 \x01(\tR\x0fresponseTypeUrl\x12-\n\x12response_streaming\x18\x05 \x01(\x08R\x11responseStreaming\x12\x31\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.OptionR\x07options\x12/\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.SyntaxR\x06syntax\"/\n\x05Mixin\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x12\n\x04root\x18\x02 \x01(\tR\x04rootBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _globals['DESCRIPTOR']._options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _globals['_API']._serialized_start=113
+ _globals['_API']._serialized_end=434
+ _globals['_METHOD']._serialized_start=437
+ _globals['_METHOD']._serialized_end=743
+ _globals['_MIXIN']._serialized_start=745
+ _globals['_MIXIN']._serialized_end=792
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/protobuf/compiler/__init__.py b/Lib/site-packages/google/protobuf/compiler/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/google/protobuf/compiler/plugin_pb2.py b/Lib/site-packages/google/protobuf/compiler/plugin_pb2.py
new file mode 100644
index 0000000..8b75bc5
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/compiler/plugin_pb2.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/compiler/plugin.proto
+# Protobuf Python Version: 4.25.2
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"c\n\x07Version\x12\x14\n\x05major\x18\x01 \x01(\x05R\x05major\x12\x14\n\x05minor\x18\x02 \x01(\x05R\x05minor\x12\x14\n\x05patch\x18\x03 \x01(\x05R\x05patch\x12\x16\n\x06suffix\x18\x04 \x01(\tR\x06suffix\"\xcf\x02\n\x14\x43odeGeneratorRequest\x12(\n\x10\x66ile_to_generate\x18\x01 \x03(\tR\x0e\x66ileToGenerate\x12\x1c\n\tparameter\x18\x02 \x01(\tR\tparameter\x12\x43\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProtoR\tprotoFile\x12\\\n\x17source_file_descriptors\x18\x11 \x03(\x0b\x32$.google.protobuf.FileDescriptorProtoR\x15sourceFileDescriptors\x12L\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.VersionR\x0f\x63ompilerVersion\"\xb3\x03\n\x15\x43odeGeneratorResponse\x12\x14\n\x05\x65rror\x18\x01 \x01(\tR\x05\x65rror\x12-\n\x12supported_features\x18\x02 \x01(\x04R\x11supportedFeatures\x12H\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.FileR\x04\x66ile\x1a\xb1\x01\n\x04\x46ile\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\'\n\x0finsertion_point\x18\x02 \x01(\tR\x0einsertionPoint\x12\x18\n\x07\x63ontent\x18\x0f \x01(\tR\x07\x63ontent\x12R\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfoR\x11generatedCodeInfo\"W\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x12\x1d\n\x19\x46\x45\x41TURE_SUPPORTS_EDITIONS\x10\x02\x42r\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb\xaa\x02\x18Google.Protobuf.Compiler')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _globals['DESCRIPTOR']._options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb\252\002\030Google.Protobuf.Compiler'
+ _globals['_VERSION']._serialized_start=101
+ _globals['_VERSION']._serialized_end=200
+ _globals['_CODEGENERATORREQUEST']._serialized_start=203
+ _globals['_CODEGENERATORREQUEST']._serialized_end=538
+ _globals['_CODEGENERATORRESPONSE']._serialized_start=541
+ _globals['_CODEGENERATORRESPONSE']._serialized_end=976
+ _globals['_CODEGENERATORRESPONSE_FILE']._serialized_start=710
+ _globals['_CODEGENERATORRESPONSE_FILE']._serialized_end=887
+ _globals['_CODEGENERATORRESPONSE_FEATURE']._serialized_start=889
+ _globals['_CODEGENERATORRESPONSE_FEATURE']._serialized_end=976
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/protobuf/descriptor.py b/Lib/site-packages/google/protobuf/descriptor.py
new file mode 100644
index 0000000..5b32e5e
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/descriptor.py
@@ -0,0 +1,1282 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Descriptors essentially contain exactly the information found in a .proto
+file, in types that make this information accessible in Python.
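+
+Example (an illustrative sketch; ``my_message_pb2`` stands in for any
+generated module)::
+
+    for field in my_message_pb2.MyMessage.DESCRIPTOR.fields:
+      print(field.name, field.number, field.type)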
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import threading
+import warnings
+
+from google.protobuf.internal import api_implementation
+
+_USE_C_DESCRIPTORS = False
+if api_implementation.Type() != 'python':
+ # Used by MakeDescriptor in cpp mode
+ import binascii
+ import os
+ # pylint: disable=protected-access
+ _message = api_implementation._c_module
+  # TODO: Remove this import after fixing api_implementation.
+ if _message is None:
+ from google.protobuf.pyext import _message
+ _USE_C_DESCRIPTORS = True
+
+
+class Error(Exception):
+ """Base error for this module."""
+
+
+class TypeTransformationError(Error):
+ """Error transforming between python proto type and corresponding C++ type."""
+
+
+if _USE_C_DESCRIPTORS:
+  # This metaclass allows overriding the behavior of code like
+  #     isinstance(my_descriptor, FieldDescriptor)
+  # so that it returns True when the descriptor is an instance of the
+  # extension type written in C++.
+ class DescriptorMetaclass(type):
+
+ def __instancecheck__(cls, obj):
+ if super(DescriptorMetaclass, cls).__instancecheck__(obj):
+ return True
+ if isinstance(obj, cls._C_DESCRIPTOR_CLASS):
+ return True
+ return False
+else:
+ # The standard metaclass; nothing changes.
+ DescriptorMetaclass = type
+
+
+class _Lock(object):
+ """Wrapper class of threading.Lock(), which is allowed by 'with'."""
+
+ def __new__(cls):
+ self = object.__new__(cls)
+ self._lock = threading.Lock() # pylint: disable=protected-access
+ return self
+
+ def __enter__(self):
+ self._lock.acquire()
+
+ def __exit__(self, exc_type, exc_value, exc_tb):
+ self._lock.release()
+
+
+_lock = threading.Lock()
+
+
+def _Deprecated(name):
+ if _Deprecated.count > 0:
+ _Deprecated.count -= 1
+ warnings.warn(
+        'Call to deprecated create function %s(). Note: Creating unlinked '
+        'descriptors is going to go away. Please use get/find descriptors from '
+ 'generated code or query the descriptor_pool.'
+ % name,
+ category=DeprecationWarning, stacklevel=3)
+
+
+# Deprecation warnings will be printed at most 100 times, which should be
+# enough for users to notice without causing a timeout.
+_Deprecated.count = 100
+
+
+_internal_create_key = object()
+
+
+class DescriptorBase(metaclass=DescriptorMetaclass):
+
+ """Descriptors base class.
+
+  This class is the base of all descriptor classes. It provides common
+  options-related functionality.
+
+ Attributes:
+ has_options: True if the descriptor has non-default options. Usually it is
+ not necessary to read this -- just call GetOptions() which will happily
+ return the default instance. However, it's sometimes useful for
+ efficiency, and also useful inside the protobuf implementation to avoid
+ some bootstrapping issues.
+ file (FileDescriptor): Reference to file info.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ # The class, or tuple of classes, that are considered as "virtual
+ # subclasses" of this descriptor class.
+ _C_DESCRIPTOR_CLASS = ()
+
+ def __init__(self, file, options, serialized_options, options_class_name):
+ """Initialize the descriptor given its options message and the name of the
+ class of the options message. The name of the class is required in case
+ the options message is None and has to be created.
+ """
+ self.file = file
+ self._options = options
+ self._options_class_name = options_class_name
+ self._serialized_options = serialized_options
+
+ # Does this descriptor have non-default options?
+ self.has_options = (self._options is not None) or (
+ self._serialized_options is not None
+ )
+
+ def _SetOptions(self, options, options_class_name):
+ """Sets the descriptor's options
+
+ This function is used in generated proto2 files to update descriptor
+ options. It must not be used outside proto2.
+ """
+ self._options = options
+ self._options_class_name = options_class_name
+
+ # Does this descriptor have non-default options?
+ self.has_options = options is not None
+
+ def GetOptions(self):
+ """Retrieves descriptor options.
+
+ This method returns the options set or creates the default options for the
+ descriptor.
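+
+    Example (illustrative, assuming ``message_descriptor`` is a message
+    descriptor)::
+
+      options = message_descriptor.GetOptions()
+      if options.deprecated:
+        ...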
+ """
+ if self._options:
+ return self._options
+
+ from google.protobuf import descriptor_pb2
+ try:
+ options_class = getattr(descriptor_pb2,
+ self._options_class_name)
+ except AttributeError:
+ raise RuntimeError('Unknown options class name %s!' %
+ (self._options_class_name))
+
+ if self._serialized_options is None:
+ with _lock:
+ self._options = options_class()
+ else:
+ options = _ParseOptions(options_class(), self._serialized_options)
+ with _lock:
+ self._options = options
+
+ return self._options
+
+
+class _NestedDescriptorBase(DescriptorBase):
+ """Common class for descriptors that can be nested."""
+
+ def __init__(self, options, options_class_name, name, full_name,
+ file, containing_type, serialized_start=None,
+ serialized_end=None, serialized_options=None):
+ """Constructor.
+
+ Args:
+ options: Protocol message options or None to use default message options.
+ options_class_name (str): The class name of the above options.
+ name (str): Name of this protocol message type.
+ full_name (str): Fully-qualified name of this protocol message type, which
+ will include protocol "package" name and the name of any enclosing
+ types.
+ containing_type: if provided, this is a nested descriptor, with this
+ descriptor as parent, otherwise None.
+      serialized_start: The start index (inclusive) of the block in
+        file.serialized_pb that describes this descriptor.
+      serialized_end: The end index (exclusive) of the block in
+        file.serialized_pb that describes this descriptor.
+ serialized_options: Protocol message serialized options or None.
+ """
+ super(_NestedDescriptorBase, self).__init__(
+ file, options, serialized_options, options_class_name
+ )
+
+ self.name = name
+ # TODO: Add function to calculate full_name instead of having it in
+ # memory?
+ self.full_name = full_name
+ self.containing_type = containing_type
+
+ self._serialized_start = serialized_start
+ self._serialized_end = serialized_end
+
+ def CopyToProto(self, proto):
+ """Copies this to the matching proto in descriptor_pb2.
+
+ Args:
+ proto: An empty proto instance from descriptor_pb2.
+
+ Raises:
+      Error: If self couldn't be serialized, due to too few constructor
+        arguments.
+ """
+ if (self.file is not None and
+ self._serialized_start is not None and
+ self._serialized_end is not None):
+ proto.ParseFromString(self.file.serialized_pb[
+ self._serialized_start:self._serialized_end])
+ else:
+ raise Error('Descriptor does not contain serialization.')
+
+
+class Descriptor(_NestedDescriptorBase):
+
+ """Descriptor for a protocol message type.
+
+ Attributes:
+ name (str): Name of this protocol message type.
+ full_name (str): Fully-qualified name of this protocol message type,
+ which will include protocol "package" name and the name of any
+ enclosing types.
+ containing_type (Descriptor): Reference to the descriptor of the type
+ containing us, or None if this is top-level.
+ fields (list[FieldDescriptor]): Field descriptors for all fields in
+ this type.
+ fields_by_number (dict(int, FieldDescriptor)): Same
+ :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed
+ by "number" attribute in each FieldDescriptor.
+ fields_by_name (dict(str, FieldDescriptor)): Same
+ :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by
+ "name" attribute in each :class:`FieldDescriptor`.
+ nested_types (list[Descriptor]): Descriptor references
+ for all protocol message types nested within this one.
+ nested_types_by_name (dict(str, Descriptor)): Same Descriptor
+ objects as in :attr:`nested_types`, but indexed by "name" attribute
+ in each Descriptor.
+ enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references
+ for all enums contained within this type.
+ enum_types_by_name (dict(str, EnumDescriptor)): Same
+ :class:`EnumDescriptor` objects as in :attr:`enum_types`, but
+ indexed by "name" attribute in each EnumDescriptor.
+ enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping
+ from enum value name to :class:`EnumValueDescriptor` for that value.
+ extensions (list[FieldDescriptor]): All extensions defined directly
+ within this message type (NOT within a nested type).
+ extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor
+ objects as :attr:`extensions`, but indexed by "name" attribute of each
+ FieldDescriptor.
+ is_extendable (bool): Does this type define any extension ranges?
+ oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields
+ in this message.
+ oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in
+ :attr:`oneofs`, but indexed by "name" attribute.
+ file (FileDescriptor): Reference to file descriptor.
+    is_map_entry (bool): Whether the message type is a map entry.
+
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.Descriptor
+
+ def __new__(
+ cls,
+ name=None,
+ full_name=None,
+ filename=None,
+ containing_type=None,
+ fields=None,
+ nested_types=None,
+ enum_types=None,
+ extensions=None,
+ options=None,
+ serialized_options=None,
+ is_extendable=True,
+ extension_ranges=None,
+ oneofs=None,
+ file=None, # pylint: disable=redefined-builtin
+ serialized_start=None,
+ serialized_end=None,
+ syntax=None,
+ is_map_entry=False,
+ create_key=None):
+ _message.Message._CheckCalledFromGeneratedFile()
+ return _message.default_pool.FindMessageTypeByName(full_name)
+
+ # NOTE: The file argument redefining a builtin is nothing we can
+ # fix right now since we don't know how many clients already rely on the
+ # name of the argument.
+ def __init__(self, name, full_name, filename, containing_type, fields,
+ nested_types, enum_types, extensions, options=None,
+ serialized_options=None,
+ is_extendable=True, extension_ranges=None, oneofs=None,
+ file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin
+ syntax=None, is_map_entry=False, create_key=None):
+ """Arguments to __init__() are as described in the description
+ of Descriptor fields above.
+
+    Note that filename is an obsolete argument that is no longer used.
+ Please use file.name to access this as an attribute.
+ """
+ if create_key is not _internal_create_key:
+ _Deprecated('Descriptor')
+
+ super(Descriptor, self).__init__(
+ options, 'MessageOptions', name, full_name, file,
+ containing_type, serialized_start=serialized_start,
+ serialized_end=serialized_end, serialized_options=serialized_options)
+
+ # We have fields in addition to fields_by_name and fields_by_number,
+ # so that:
+ # 1. Clients can index fields by "order in which they're listed."
+ # 2. Clients can easily iterate over all fields with the terse
+ # syntax: for f in descriptor.fields: ...
+ self.fields = fields
+ for field in self.fields:
+ field.containing_type = self
+ self.fields_by_number = dict((f.number, f) for f in fields)
+ self.fields_by_name = dict((f.name, f) for f in fields)
+ self._fields_by_camelcase_name = None
+
+ self.nested_types = nested_types
+ for nested_type in nested_types:
+ nested_type.containing_type = self
+ self.nested_types_by_name = dict((t.name, t) for t in nested_types)
+
+ self.enum_types = enum_types
+ for enum_type in self.enum_types:
+ enum_type.containing_type = self
+ self.enum_types_by_name = dict((t.name, t) for t in enum_types)
+ self.enum_values_by_name = dict(
+ (v.name, v) for t in enum_types for v in t.values)
+
+ self.extensions = extensions
+ for extension in self.extensions:
+ extension.extension_scope = self
+ self.extensions_by_name = dict((f.name, f) for f in extensions)
+ self.is_extendable = is_extendable
+ self.extension_ranges = extension_ranges
+ self.oneofs = oneofs if oneofs is not None else []
+ self.oneofs_by_name = dict((o.name, o) for o in self.oneofs)
+ for oneof in self.oneofs:
+ oneof.containing_type = self
+ self._deprecated_syntax = syntax or "proto2"
+ self._is_map_entry = is_map_entry
+
+ @property
+ def syntax(self):
+ warnings.warn(
+ 'descriptor.syntax is deprecated. It will be removed'
+        ' soon. Most usages are checking field descriptors. Consider using'
+        ' has_presence or is_packed on field descriptors instead.'
+ )
+ return self._deprecated_syntax
+
+ @property
+ def fields_by_camelcase_name(self):
+ """Same FieldDescriptor objects as in :attr:`fields`, but indexed by
+ :attr:`FieldDescriptor.camelcase_name`.
+ """
+ if self._fields_by_camelcase_name is None:
+ self._fields_by_camelcase_name = dict(
+ (f.camelcase_name, f) for f in self.fields)
+ return self._fields_by_camelcase_name
+
+ def EnumValueName(self, enum, value):
+ """Returns the string name of an enum value.
+
+ This is just a small helper method to simplify a common operation.
+
+ Args:
+ enum: string name of the Enum.
+ value: int, value of the enum.
+
+ Returns:
+ string name of the enum value.
+
+ Raises:
+ KeyError if either the Enum doesn't exist or the value is not a valid
+ value for the enum.
+ """
+ return self.enum_types_by_name[enum].values_by_number[value].name
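+
+  # Example (illustrative; the descriptor, enum, and value names are
+  # hypothetical):
+  #
+  #   desc = my_pb2.MyMessage.DESCRIPTOR  # a generated message descriptor
+  #   desc.EnumValueName('Color', 1)      # -> 'RED', if Color declares RED = 1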
+
+ def CopyToProto(self, proto):
+ """Copies this to a descriptor_pb2.DescriptorProto.
+
+ Args:
+ proto: An empty descriptor_pb2.DescriptorProto.
+ """
+ # This function is overridden to give a better doc comment.
+ super(Descriptor, self).CopyToProto(proto)
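+
+  # Example (illustrative sketch; ``my_pb2`` and ``MyMessage`` are hypothetical
+  # generated names):
+  #
+  #   desc = my_pb2.MyMessage.DESCRIPTOR
+  #   [f.name for f in desc.fields]    # fields in declaration order
+  #   desc.fields_by_name['my_field']  # lookup by .proto field name
+  #   desc.fields_by_number[1]         # lookup by field number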
+
+
+# TODO: We should have aggressive checking here,
+# for example:
+# * If you specify a repeated field, you should not be allowed
+# to specify a default value.
+# * [Other examples here as needed].
+#
+# TODO: for this and other *Descriptor classes, we
+# might also want to lock things down aggressively (e.g.,
+# prevent clients from setting the attributes). Having
+# stronger invariants here in general will reduce the number
+# of runtime checks we must do in reflection.py...
+class FieldDescriptor(DescriptorBase):
+
+ """Descriptor for a single field in a .proto file.
+
+ Attributes:
+ name (str): Name of this field, exactly as it appears in .proto.
+ full_name (str): Name of this field, including containing scope. This is
+ particularly relevant for extensions.
+ index (int): Dense, 0-indexed index giving the order that this
+ field textually appears within its message in the .proto file.
+ number (int): Tag number declared for this field in the .proto file.
+
+ type (int): (One of the TYPE_* constants below) Declared type.
+ cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to
+ represent this field.
+
+ label (int): (One of the LABEL_* constants below) Tells whether this
+ field is optional, required, or repeated.
+ has_default_value (bool): True if this field has a default value defined,
+ otherwise false.
+ default_value (Varies): Default value of this field. Only
+ meaningful for non-repeated scalar fields. Repeated fields
+ should always set this to [], and non-repeated composite
+ fields should always set this to None.
+
+ containing_type (Descriptor): Descriptor of the protocol message
+ type that contains this field. Set by the Descriptor constructor
+ if we're passed into one.
+ Somewhat confusingly, for extension fields, this is the
+ descriptor of the EXTENDED message, not the descriptor
+ of the message containing this field. (See is_extension and
+ extension_scope below).
+ message_type (Descriptor): If a composite field, a descriptor
+ of the message type contained in this field. Otherwise, this is None.
+ enum_type (EnumDescriptor): If this field contains an enum, a
+ descriptor of that enum. Otherwise, this is None.
+
+ is_extension: True iff this describes an extension field.
+ extension_scope (Descriptor): Only meaningful if is_extension is True.
+ Gives the message that immediately contains this extension field.
+ Will be None iff we're a top-level (file-level) extension field.
+
+ options (descriptor_pb2.FieldOptions): Protocol message field options or
+ None to use default field options.
+
+ containing_oneof (OneofDescriptor): If the field is a member of a oneof
+ union, contains its descriptor. Otherwise, None.
+
+ file (FileDescriptor): Reference to file descriptor.
+ """
+
+ # Must be consistent with C++ FieldDescriptor::Type enum in
+ # descriptor.h.
+ #
+ # TODO: Find a way to eliminate this repetition.
+ TYPE_DOUBLE = 1
+ TYPE_FLOAT = 2
+ TYPE_INT64 = 3
+ TYPE_UINT64 = 4
+ TYPE_INT32 = 5
+ TYPE_FIXED64 = 6
+ TYPE_FIXED32 = 7
+ TYPE_BOOL = 8
+ TYPE_STRING = 9
+ TYPE_GROUP = 10
+ TYPE_MESSAGE = 11
+ TYPE_BYTES = 12
+ TYPE_UINT32 = 13
+ TYPE_ENUM = 14
+ TYPE_SFIXED32 = 15
+ TYPE_SFIXED64 = 16
+ TYPE_SINT32 = 17
+ TYPE_SINT64 = 18
+ MAX_TYPE = 18
+
+ # Must be consistent with C++ FieldDescriptor::CppType enum in
+ # descriptor.h.
+ #
+ # TODO: Find a way to eliminate this repetition.
+ CPPTYPE_INT32 = 1
+ CPPTYPE_INT64 = 2
+ CPPTYPE_UINT32 = 3
+ CPPTYPE_UINT64 = 4
+ CPPTYPE_DOUBLE = 5
+ CPPTYPE_FLOAT = 6
+ CPPTYPE_BOOL = 7
+ CPPTYPE_ENUM = 8
+ CPPTYPE_STRING = 9
+ CPPTYPE_MESSAGE = 10
+ MAX_CPPTYPE = 10
+
+ _PYTHON_TO_CPP_PROTO_TYPE_MAP = {
+ TYPE_DOUBLE: CPPTYPE_DOUBLE,
+ TYPE_FLOAT: CPPTYPE_FLOAT,
+ TYPE_ENUM: CPPTYPE_ENUM,
+ TYPE_INT64: CPPTYPE_INT64,
+ TYPE_SINT64: CPPTYPE_INT64,
+ TYPE_SFIXED64: CPPTYPE_INT64,
+ TYPE_UINT64: CPPTYPE_UINT64,
+ TYPE_FIXED64: CPPTYPE_UINT64,
+ TYPE_INT32: CPPTYPE_INT32,
+ TYPE_SFIXED32: CPPTYPE_INT32,
+ TYPE_SINT32: CPPTYPE_INT32,
+ TYPE_UINT32: CPPTYPE_UINT32,
+ TYPE_FIXED32: CPPTYPE_UINT32,
+ TYPE_BYTES: CPPTYPE_STRING,
+ TYPE_STRING: CPPTYPE_STRING,
+ TYPE_BOOL: CPPTYPE_BOOL,
+ TYPE_MESSAGE: CPPTYPE_MESSAGE,
+ TYPE_GROUP: CPPTYPE_MESSAGE
+ }
+
+ # Must be consistent with C++ FieldDescriptor::Label enum in
+ # descriptor.h.
+ #
+ # TODO: Find a way to eliminate this repetition.
+ LABEL_OPTIONAL = 1
+ LABEL_REQUIRED = 2
+ LABEL_REPEATED = 3
+ MAX_LABEL = 3
+
+ # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber,
+ # and kLastReservedNumber in descriptor.h
+ MAX_FIELD_NUMBER = (1 << 29) - 1
+ FIRST_RESERVED_FIELD_NUMBER = 19000
+ LAST_RESERVED_FIELD_NUMBER = 19999
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.FieldDescriptor
+
+ def __new__(cls, name, full_name, index, number, type, cpp_type, label,
+ default_value, message_type, enum_type, containing_type,
+ is_extension, extension_scope, options=None,
+ serialized_options=None,
+ has_default_value=True, containing_oneof=None, json_name=None,
+ file=None, create_key=None): # pylint: disable=redefined-builtin
+ _message.Message._CheckCalledFromGeneratedFile()
+ if is_extension:
+ return _message.default_pool.FindExtensionByName(full_name)
+ else:
+ return _message.default_pool.FindFieldByName(full_name)
+
+ def __init__(self, name, full_name, index, number, type, cpp_type, label,
+ default_value, message_type, enum_type, containing_type,
+ is_extension, extension_scope, options=None,
+ serialized_options=None,
+ has_default_value=True, containing_oneof=None, json_name=None,
+ file=None, create_key=None): # pylint: disable=redefined-builtin
+ """The arguments are as described in the description of FieldDescriptor
+ attributes above.
+
+ Note that containing_type may be None, and may be set later if necessary
+ (to deal with circular references between message types, for example).
+ Likewise for extension_scope.
+ """
+ if create_key is not _internal_create_key:
+ _Deprecated('FieldDescriptor')
+
+ super(FieldDescriptor, self).__init__(
+ file, options, serialized_options, 'FieldOptions'
+ )
+ self.name = name
+ self.full_name = full_name
+ self._camelcase_name = None
+ if json_name is None:
+ self.json_name = _ToJsonName(name)
+ else:
+ self.json_name = json_name
+ self.index = index
+ self.number = number
+ self.type = type
+ self.cpp_type = cpp_type
+ self.label = label
+ self.has_default_value = has_default_value
+ self.default_value = default_value
+ self.containing_type = containing_type
+ self.message_type = message_type
+ self.enum_type = enum_type
+ self.is_extension = is_extension
+ self.extension_scope = extension_scope
+ self.containing_oneof = containing_oneof
+ if api_implementation.Type() == 'python':
+ self._cdescriptor = None
+ else:
+ if is_extension:
+ self._cdescriptor = _message.default_pool.FindExtensionByName(full_name)
+ else:
+ self._cdescriptor = _message.default_pool.FindFieldByName(full_name)
+
+ @property
+ def camelcase_name(self):
+ """Camelcase name of this field.
+
+ Returns:
+ str: the name in CamelCase.
+ """
+ if self._camelcase_name is None:
+ self._camelcase_name = _ToCamelCase(self.name)
+ return self._camelcase_name
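+
+  # Example (illustrative): a field declared as 'foo_bar_baz' in the .proto
+  # file has camelcase_name 'fooBarBaz' (computed by _ToCamelCase below).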
+
+ @property
+ def has_presence(self):
+ """Whether the field distinguishes between unpopulated and default values.
+
+ Raises:
+      RuntimeError: if a singular field is not linked to a message or file.
+ """
+ if self.label == FieldDescriptor.LABEL_REPEATED:
+ return False
+ if (self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE or
+ self.containing_oneof):
+ return True
+ # self.containing_type is used here instead of self.file for legacy
+ # compatibility. FieldDescriptor.file was added in cl/153110619
+ # Some old/generated code didn't link file to FieldDescriptor.
+ # TODO: remove syntax usage b/240619313
+ return self.containing_type._deprecated_syntax == 'proto2'
+
+ @property
+ def is_packed(self):
+ """Returns if the field is packed."""
+ if self.label != FieldDescriptor.LABEL_REPEATED:
+ return False
+ field_type = self.type
+ if (field_type == FieldDescriptor.TYPE_STRING or
+ field_type == FieldDescriptor.TYPE_GROUP or
+ field_type == FieldDescriptor.TYPE_MESSAGE or
+ field_type == FieldDescriptor.TYPE_BYTES):
+ return False
+ if self.containing_type._deprecated_syntax == 'proto2':
+ return self.has_options and self.GetOptions().packed
+ else:
+ return (not self.has_options or
+ not self.GetOptions().HasField('packed') or
+ self.GetOptions().packed)
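+
+  # Illustrative summary of the rule above: under proto2 semantics a repeated
+  # scalar field is packed only when [packed = true] is set explicitly; under
+  # proto3 semantics it is packed unless [packed = false] is set explicitly.
+  # Strings, bytes, groups, and messages are never packed.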
+
+ @staticmethod
+ def ProtoTypeToCppProtoType(proto_type):
+ """Converts from a Python proto type to a C++ Proto Type.
+
+ The Python ProtocolBuffer classes specify both the 'Python' datatype and the
+    'C++' datatype - and they're not the same. This helper method translates
+    from one to the other.
+
+ Args:
+ proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*)
+ Returns:
+ int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type.
+ Raises:
+ TypeTransformationError: when the Python proto type isn't known.
+ """
+ try:
+ return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type]
+ except KeyError:
+ raise TypeTransformationError('Unknown proto_type: %s' % proto_type)
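+
+  # Example (values taken from _PYTHON_TO_CPP_PROTO_TYPE_MAP above):
+  #
+  #   FieldDescriptor.ProtoTypeToCppProtoType(FieldDescriptor.TYPE_SINT64)
+  #   # -> FieldDescriptor.CPPTYPE_INT64
+  #   FieldDescriptor.ProtoTypeToCppProtoType(FieldDescriptor.TYPE_BYTES)
+  #   # -> FieldDescriptor.CPPTYPE_STRING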
+
+
+class EnumDescriptor(_NestedDescriptorBase):
+
+ """Descriptor for an enum defined in a .proto file.
+
+ Attributes:
+ name (str): Name of the enum type.
+ full_name (str): Full name of the type, including package name
+ and any enclosing type(s).
+
+ values (list[EnumValueDescriptor]): List of the values
+ in this enum.
+ values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`,
+ but indexed by the "name" field of each EnumValueDescriptor.
+ values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`,
+ but indexed by the "number" field of each EnumValueDescriptor.
+ containing_type (Descriptor): Descriptor of the immediate containing
+ type of this enum, or None if this is an enum defined at the
+ top level in a .proto file. Set by Descriptor's constructor
+ if we're passed into one.
+ file (FileDescriptor): Reference to file descriptor.
+ options (descriptor_pb2.EnumOptions): Enum options message or
+ None to use default enum options.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.EnumDescriptor
+
+ def __new__(cls, name, full_name, filename, values,
+ containing_type=None, options=None,
+ serialized_options=None, file=None, # pylint: disable=redefined-builtin
+ serialized_start=None, serialized_end=None, create_key=None):
+ _message.Message._CheckCalledFromGeneratedFile()
+ return _message.default_pool.FindEnumTypeByName(full_name)
+
+ def __init__(self, name, full_name, filename, values,
+ containing_type=None, options=None,
+ serialized_options=None, file=None, # pylint: disable=redefined-builtin
+ serialized_start=None, serialized_end=None, create_key=None):
+ """Arguments are as described in the attribute description above.
+
+    Note that filename is an obsolete argument that is no longer used.
+ Please use file.name to access this as an attribute.
+ """
+ if create_key is not _internal_create_key:
+ _Deprecated('EnumDescriptor')
+
+ super(EnumDescriptor, self).__init__(
+ options, 'EnumOptions', name, full_name, file,
+ containing_type, serialized_start=serialized_start,
+ serialized_end=serialized_end, serialized_options=serialized_options)
+
+ self.values = values
+ for value in self.values:
+ value.file = file
+ value.type = self
+ self.values_by_name = dict((v.name, v) for v in values)
+ # Values are reversed to ensure that the first alias is retained.
+ self.values_by_number = dict((v.number, v) for v in reversed(values))
+
+ @property
+ def is_closed(self):
+ """Returns true whether this is a "closed" enum.
+
+ This means that it:
+ - Has a fixed set of values, rather than being equivalent to an int32.
+ - Encountering values not in this set causes them to be treated as unknown
+ fields.
+ - The first value (i.e., the default) may be nonzero.
+
+ WARNING: Some runtimes currently have a quirk where non-closed enums are
+ treated as closed when used as the type of fields defined in a
+ `syntax = proto2;` file. This quirk is not present in all runtimes; as of
+ writing, we know that:
+
+ - C++, Java, and C++-based Python share this quirk.
+ - UPB and UPB-based Python do not.
+ - PHP and Ruby treat all enums as open regardless of declaration.
+
+ Care should be taken when using this function to respect the target
+ runtime's enum handling quirks.
+ """
+ return self.file._deprecated_syntax == 'proto2'
+
+ def CopyToProto(self, proto):
+ """Copies this to a descriptor_pb2.EnumDescriptorProto.
+
+ Args:
+ proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto.
+ """
+ # This function is overridden to give a better doc comment.
+ super(EnumDescriptor, self).CopyToProto(proto)
+
+
+class EnumValueDescriptor(DescriptorBase):
+
+ """Descriptor for a single value within an enum.
+
+ Attributes:
+ name (str): Name of this value.
+ index (int): Dense, 0-indexed index giving the order that this
+ value appears textually within its enum in the .proto file.
+ number (int): Actual number assigned to this enum value.
+ type (EnumDescriptor): :class:`EnumDescriptor` to which this value
+ belongs. Set by :class:`EnumDescriptor`'s constructor if we're
+ passed into one.
+ options (descriptor_pb2.EnumValueOptions): Enum value options message or
+      None to use default enum value options.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor
+
+ def __new__(cls, name, index, number,
+ type=None, # pylint: disable=redefined-builtin
+ options=None, serialized_options=None, create_key=None):
+ _message.Message._CheckCalledFromGeneratedFile()
+ # There is no way we can build a complete EnumValueDescriptor with the
+ # given parameters (the name of the Enum is not known, for example).
+ # Fortunately generated files just pass it to the EnumDescriptor()
+ # constructor, which will ignore it, so returning None is good enough.
+ return None
+
+ def __init__(self, name, index, number,
+ type=None, # pylint: disable=redefined-builtin
+ options=None, serialized_options=None, create_key=None):
+ """Arguments are as described in the attribute description above."""
+ if create_key is not _internal_create_key:
+ _Deprecated('EnumValueDescriptor')
+
+ super(EnumValueDescriptor, self).__init__(
+ type.file if type else None,
+ options,
+ serialized_options,
+ 'EnumValueOptions',
+ )
+ self.name = name
+ self.index = index
+ self.number = number
+ self.type = type
+
+
+class OneofDescriptor(DescriptorBase):
+ """Descriptor for a oneof field.
+
+ Attributes:
+ name (str): Name of the oneof field.
+ full_name (str): Full name of the oneof field, including package name.
+ index (int): 0-based index giving the order of the oneof field inside
+ its containing type.
+ containing_type (Descriptor): :class:`Descriptor` of the protocol message
+ type that contains this field. Set by the :class:`Descriptor` constructor
+ if we're passed into one.
+ fields (list[FieldDescriptor]): The list of field descriptors this
+ oneof can contain.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.OneofDescriptor
+
+ def __new__(
+ cls, name, full_name, index, containing_type, fields, options=None,
+ serialized_options=None, create_key=None):
+ _message.Message._CheckCalledFromGeneratedFile()
+ return _message.default_pool.FindOneofByName(full_name)
+
+ def __init__(
+ self, name, full_name, index, containing_type, fields, options=None,
+ serialized_options=None, create_key=None):
+ """Arguments are as described in the attribute description above."""
+ if create_key is not _internal_create_key:
+ _Deprecated('OneofDescriptor')
+
+ super(OneofDescriptor, self).__init__(
+ containing_type.file if containing_type else None,
+ options,
+ serialized_options,
+ 'OneofOptions',
+ )
+ self.name = name
+ self.full_name = full_name
+ self.index = index
+ self.containing_type = containing_type
+ self.fields = fields
+
+
+class ServiceDescriptor(_NestedDescriptorBase):
+
+ """Descriptor for a service.
+
+ Attributes:
+ name (str): Name of the service.
+ full_name (str): Full name of the service, including package name.
+    index (int): 0-indexed index giving the order that this service's
+      definition appears within the .proto file.
+ methods (list[MethodDescriptor]): List of methods provided by this
+ service.
+ methods_by_name (dict(str, MethodDescriptor)): Same
+      :class:`MethodDescriptor` objects as in :attr:`methods`, but
+ indexed by "name" attribute in each :class:`MethodDescriptor`.
+ options (descriptor_pb2.ServiceOptions): Service options message or
+ None to use default service options.
+ file (FileDescriptor): Reference to file info.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor
+
+ def __new__(
+ cls,
+ name=None,
+ full_name=None,
+ index=None,
+ methods=None,
+ options=None,
+ serialized_options=None,
+ file=None, # pylint: disable=redefined-builtin
+ serialized_start=None,
+ serialized_end=None,
+ create_key=None):
+ _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access
+ return _message.default_pool.FindServiceByName(full_name)
+
+ def __init__(self, name, full_name, index, methods, options=None,
+ serialized_options=None, file=None, # pylint: disable=redefined-builtin
+ serialized_start=None, serialized_end=None, create_key=None):
+ if create_key is not _internal_create_key:
+ _Deprecated('ServiceDescriptor')
+
+ super(ServiceDescriptor, self).__init__(
+ options, 'ServiceOptions', name, full_name, file,
+ None, serialized_start=serialized_start,
+ serialized_end=serialized_end, serialized_options=serialized_options)
+ self.index = index
+ self.methods = methods
+ self.methods_by_name = dict((m.name, m) for m in methods)
+ # Set the containing service for each method in this service.
+ for method in self.methods:
+ method.file = self.file
+ method.containing_service = self
+
+ def FindMethodByName(self, name):
+ """Searches for the specified method, and returns its descriptor.
+
+ Args:
+ name (str): Name of the method.
+
+ Returns:
+ MethodDescriptor: The descriptor for the requested method.
+
+ Raises:
+ KeyError: if the method cannot be found in the service.
+ """
+ return self.methods_by_name[name]
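+
+  # Example (illustrative; the method name is hypothetical):
+  #
+  #   service_desc.FindMethodByName('GetUser')  # -> MethodDescriptor
+  #   # raises KeyError if 'GetUser' is not defined on the service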
+
+ def CopyToProto(self, proto):
+ """Copies this to a descriptor_pb2.ServiceDescriptorProto.
+
+ Args:
+ proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto.
+ """
+ # This function is overridden to give a better doc comment.
+ super(ServiceDescriptor, self).CopyToProto(proto)
+
+
+class MethodDescriptor(DescriptorBase):
+
+ """Descriptor for a method in a service.
+
+ Attributes:
+ name (str): Name of the method within the service.
+ full_name (str): Full name of method.
+ index (int): 0-indexed index of the method inside the service.
+ containing_service (ServiceDescriptor): The service that contains this
+ method.
+ input_type (Descriptor): The descriptor of the message that this method
+ accepts.
+ output_type (Descriptor): The descriptor of the message that this method
+ returns.
+ client_streaming (bool): Whether this method uses client streaming.
+ server_streaming (bool): Whether this method uses server streaming.
+ options (descriptor_pb2.MethodOptions or None): Method options message, or
+ None to use default method options.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.MethodDescriptor
+
+ def __new__(cls,
+ name,
+ full_name,
+ index,
+ containing_service,
+ input_type,
+ output_type,
+ client_streaming=False,
+ server_streaming=False,
+ options=None,
+ serialized_options=None,
+ create_key=None):
+ _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access
+ return _message.default_pool.FindMethodByName(full_name)
+
+ def __init__(self,
+ name,
+ full_name,
+ index,
+ containing_service,
+ input_type,
+ output_type,
+ client_streaming=False,
+ server_streaming=False,
+ options=None,
+ serialized_options=None,
+ create_key=None):
+ """The arguments are as described in the description of MethodDescriptor
+ attributes above.
+
+ Note that containing_service may be None, and may be set later if necessary.
+ """
+ if create_key is not _internal_create_key:
+ _Deprecated('MethodDescriptor')
+
+ super(MethodDescriptor, self).__init__(
+ containing_service.file if containing_service else None,
+ options,
+ serialized_options,
+ 'MethodOptions',
+ )
+ self.name = name
+ self.full_name = full_name
+ self.index = index
+ self.containing_service = containing_service
+ self.input_type = input_type
+ self.output_type = output_type
+ self.client_streaming = client_streaming
+ self.server_streaming = server_streaming
+
+ def CopyToProto(self, proto):
+ """Copies this to a descriptor_pb2.MethodDescriptorProto.
+
+ Args:
+ proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto.
+
+ Raises:
+ Error: If self couldn't be serialized, due to too few constructor
+ arguments.
+ """
+ if self.containing_service is not None:
+ from google.protobuf import descriptor_pb2
+ service_proto = descriptor_pb2.ServiceDescriptorProto()
+ self.containing_service.CopyToProto(service_proto)
+ proto.CopyFrom(service_proto.method[self.index])
+ else:
+ raise Error('Descriptor does not contain a service.')
+
+
+class FileDescriptor(DescriptorBase):
+ """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto.
+
+ Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and
+ :attr:`dependencies` fields are only set by the
+ :py:mod:`google.protobuf.message_factory` module, and not by the generated
+ proto code.
+
+ Attributes:
+ name (str): Name of file, relative to root of source tree.
+    package (str): Name of the package.
+    syntax (str): string indicating syntax of the file (can be "proto2" or
+      "proto3").
+ serialized_pb (bytes): Byte string of serialized
+ :class:`descriptor_pb2.FileDescriptorProto`.
+ dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor`
+ objects this :class:`FileDescriptor` depends on.
+ public_dependencies (list[FileDescriptor]): A subset of
+ :attr:`dependencies`, which were declared as "public".
+ message_types_by_name (dict(str, Descriptor)): Mapping from message names
+ to their :class:`Descriptor`.
+ enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to
+ their :class:`EnumDescriptor`.
+ extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension
+ names declared at file scope to their :class:`FieldDescriptor`.
+ services_by_name (dict(str, ServiceDescriptor)): Mapping from services'
+ names to their :class:`ServiceDescriptor`.
+ pool (DescriptorPool): The pool this descriptor belongs to. When not
+ passed to the constructor, the global default pool is used.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.FileDescriptor
+
+ def __new__(cls, name, package, options=None,
+ serialized_options=None, serialized_pb=None,
+ dependencies=None, public_dependencies=None,
+ syntax=None, pool=None, create_key=None):
+ # FileDescriptor() is called from various places, not only from generated
+ # files, to register dynamic proto files and messages.
+ # pylint: disable=g-explicit-bool-comparison
+ if serialized_pb:
+ return _message.default_pool.AddSerializedFile(serialized_pb)
+ else:
+ return super(FileDescriptor, cls).__new__(cls)
+
+ def __init__(self, name, package, options=None,
+ serialized_options=None, serialized_pb=None,
+ dependencies=None, public_dependencies=None,
+ syntax=None, pool=None, create_key=None):
+ """Constructor."""
+ if create_key is not _internal_create_key:
+ _Deprecated('FileDescriptor')
+
+ super(FileDescriptor, self).__init__(
+ None, options, serialized_options, 'FileOptions'
+ )
+
+ if pool is None:
+ from google.protobuf import descriptor_pool
+ pool = descriptor_pool.Default()
+ self.pool = pool
+ self.message_types_by_name = {}
+ self.name = name
+ self.package = package
+ self._deprecated_syntax = syntax or "proto2"
+ self.serialized_pb = serialized_pb
+
+ self.enum_types_by_name = {}
+ self.extensions_by_name = {}
+ self.services_by_name = {}
+ self.dependencies = (dependencies or [])
+ self.public_dependencies = (public_dependencies or [])
+
+ @property
+ def syntax(self):
+ warnings.warn(
+ 'descriptor.syntax is deprecated. It will be removed'
+        ' soon. Most usages are checking field descriptors. Consider using'
+        ' has_presence or is_packed on field descriptors instead.'
+ )
+ return self._deprecated_syntax
+
+ def CopyToProto(self, proto):
+ """Copies this to a descriptor_pb2.FileDescriptorProto.
+
+ Args:
+ proto: An empty descriptor_pb2.FileDescriptorProto.
+ """
+ proto.ParseFromString(self.serialized_pb)
+
+
+def _ParseOptions(message, string):
+ """Parses serialized options.
+
+ This helper function is used to parse serialized options in generated
+ proto2 files. It must not be used outside proto2.
+ """
+ message.ParseFromString(string)
+ return message
+
+
+def _ToCamelCase(name):
+ """Converts name to camel-case and returns it."""
+ capitalize_next = False
+ result = []
+
+ for c in name:
+ if c == '_':
+ if result:
+ capitalize_next = True
+ elif capitalize_next:
+ result.append(c.upper())
+ capitalize_next = False
+ else:
+ result += c
+
+ # Lower-case the first letter.
+ if result and result[0].isupper():
+ result[0] = result[0].lower()
+ return ''.join(result)
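+
+# Illustrative behavior of _ToCamelCase (derived from the loop above):
+#   _ToCamelCase('foo_bar_baz') -> 'fooBarBaz'
+#   _ToCamelCase('_foo')        -> 'foo'    (a leading underscore is dropped)
+#   _ToCamelCase('FooBar')      -> 'fooBar' (the first letter is lowered)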
+
+
+def _OptionsOrNone(descriptor_proto):
+ """Returns the value of the field `options`, or None if it is not set."""
+ if descriptor_proto.HasField('options'):
+ return descriptor_proto.options
+ else:
+ return None
+
+
+def _ToJsonName(name):
+ """Converts name to Json name and returns it."""
+ capitalize_next = False
+ result = []
+
+ for c in name:
+ if c == '_':
+ capitalize_next = True
+ elif capitalize_next:
+ result.append(c.upper())
+ capitalize_next = False
+ else:
+ result += c
+
+ return ''.join(result)
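+
+# Illustrative behavior of _ToJsonName (derived from the loop above); unlike
+# _ToCamelCase, a leading underscore capitalizes the following character and
+# the first letter is never lowered:
+#   _ToJsonName('foo_bar') -> 'fooBar'
+#   _ToJsonName('_foo')    -> 'Foo'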
+
+
+def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
+ syntax=None):
+ """Make a protobuf Descriptor given a DescriptorProto protobuf.
+
+ Handles nested descriptors. Note that this is limited to the scope of defining
+ a message inside of another message. Composite fields can currently only be
+ resolved if the message is defined in the same scope as the field.
+
+ Args:
+ desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
+ package: Optional package name for the new message Descriptor (string).
+    build_file_if_cpp: Update the C++ descriptor pool if the C++ API
+      implementation is in use. Set to False on recursion, so no duplicates
+      are created.
+ syntax: The syntax/semantics that should be used. Set to "proto3" to get
+ proto3 field presence semantics.
+ Returns:
+ A Descriptor for protobuf messages.
+ """
+ if api_implementation.Type() != 'python' and build_file_if_cpp:
+ # The C++ implementation requires all descriptors to be backed by the same
+ # definition in the C++ descriptor pool. To do this, we build a
+ # FileDescriptorProto with the same definition as this descriptor and build
+ # it into the pool.
+ from google.protobuf import descriptor_pb2
+ file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
+ file_descriptor_proto.message_type.add().MergeFrom(desc_proto)
+
+ # Generate a random name for this proto file to prevent conflicts with any
+ # imported ones. We need to specify a file name so the descriptor pool
+ # accepts our FileDescriptorProto, but it is not important what that file
+ # name is actually set to.
+ proto_name = binascii.hexlify(os.urandom(16)).decode('ascii')
+
+ if package:
+ file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
+ proto_name + '.proto')
+ file_descriptor_proto.package = package
+ else:
+ file_descriptor_proto.name = proto_name + '.proto'
+
+ _message.default_pool.Add(file_descriptor_proto)
+ result = _message.default_pool.FindFileByName(file_descriptor_proto.name)
+
+ if _USE_C_DESCRIPTORS:
+ return result.message_types_by_name[desc_proto.name]
+
+ full_message_name = [desc_proto.name]
+ if package: full_message_name.insert(0, package)
+
+ # Create Descriptors for enum types
+ enum_types = {}
+ for enum_proto in desc_proto.enum_type:
+ full_name = '.'.join(full_message_name + [enum_proto.name])
+ enum_desc = EnumDescriptor(
+ enum_proto.name, full_name, None, [
+ EnumValueDescriptor(enum_val.name, ii, enum_val.number,
+ create_key=_internal_create_key)
+ for ii, enum_val in enumerate(enum_proto.value)],
+ create_key=_internal_create_key)
+ enum_types[full_name] = enum_desc
+
+ # Create Descriptors for nested types
+ nested_types = {}
+ for nested_proto in desc_proto.nested_type:
+ full_name = '.'.join(full_message_name + [nested_proto.name])
+ # Nested types are just those defined inside of the message, not all types
+ # used by fields in the message, so no loops are possible here.
+ nested_desc = MakeDescriptor(nested_proto,
+ package='.'.join(full_message_name),
+ build_file_if_cpp=False,
+ syntax=syntax)
+ nested_types[full_name] = nested_desc
+
+ fields = []
+ for field_proto in desc_proto.field:
+ full_name = '.'.join(full_message_name + [field_proto.name])
+ enum_desc = None
+ nested_desc = None
+ if field_proto.json_name:
+ json_name = field_proto.json_name
+ else:
+ json_name = None
+ if field_proto.HasField('type_name'):
+ type_name = field_proto.type_name
+ full_type_name = '.'.join(full_message_name +
+ [type_name[type_name.rfind('.')+1:]])
+ if full_type_name in nested_types:
+ nested_desc = nested_types[full_type_name]
+ elif full_type_name in enum_types:
+ enum_desc = enum_types[full_type_name]
+ # Else type_name references a non-local type, which isn't implemented
+ field = FieldDescriptor(
+ field_proto.name, full_name, field_proto.number - 1,
+ field_proto.number, field_proto.type,
+ FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
+ field_proto.label, None, nested_desc, enum_desc, None, False, None,
+ options=_OptionsOrNone(field_proto), has_default_value=False,
+ json_name=json_name, create_key=_internal_create_key)
+ fields.append(field)
+
+ desc_name = '.'.join(full_message_name)
+ return Descriptor(desc_proto.name, desc_name, None, None, fields,
+ list(nested_types.values()), list(enum_types.values()), [],
+ options=_OptionsOrNone(desc_proto),
+ create_key=_internal_create_key)
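+
+# Example (illustrative sketch): building a Descriptor at runtime from a
+# DescriptorProto. The message, field, and package names are hypothetical.
+#
+#   from google.protobuf import descriptor_pb2
+#   proto = descriptor_pb2.DescriptorProto()
+#   proto.name = 'Pair'
+#   field = proto.field.add()
+#   field.name = 'key'
+#   field.number = 1
+#   field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
+#   field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
+#   pair_desc = MakeDescriptor(proto, package='demo')
+#   pair_desc.full_name                     # -> 'demo.Pair'
+#   pair_desc.fields_by_name['key'].number  # -> 1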
diff --git a/Lib/site-packages/google/protobuf/descriptor_database.py b/Lib/site-packages/google/protobuf/descriptor_database.py
new file mode 100644
index 0000000..46a893e
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/descriptor_database.py
@@ -0,0 +1,154 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Provides a container for DescriptorProtos."""
+
+__author__ = 'matthewtoia@google.com (Matt Toia)'
+
+import warnings
+
+
+class Error(Exception):
+ pass
+
+
+class DescriptorDatabaseConflictingDefinitionError(Error):
+ """Raised when a proto is added with the same name & different descriptor."""
+
+
+class DescriptorDatabase(object):
+ """A container accepting FileDescriptorProtos and maps DescriptorProtos."""
+
+ def __init__(self):
+ self._file_desc_protos_by_file = {}
+ self._file_desc_protos_by_symbol = {}
+
+ def Add(self, file_desc_proto):
+ """Adds the FileDescriptorProto and its types to this database.
+
+ Args:
+ file_desc_proto: The FileDescriptorProto to add.
+ Raises:
+ DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
+ add a proto with the same name but different definition than an
+ existing proto in the database.
+ """
+ proto_name = file_desc_proto.name
+ if proto_name not in self._file_desc_protos_by_file:
+ self._file_desc_protos_by_file[proto_name] = file_desc_proto
+ elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
+ raise DescriptorDatabaseConflictingDefinitionError(
+ '%s already added, but with different descriptor.' % proto_name)
+ else:
+ return
+
+ # Add all the top-level descriptors to the index.
+ package = file_desc_proto.package
+ for message in file_desc_proto.message_type:
+ for name in _ExtractSymbols(message, package):
+ self._AddSymbol(name, file_desc_proto)
+ for enum in file_desc_proto.enum_type:
+ self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto)
+ for enum_value in enum.value:
+ self._file_desc_protos_by_symbol[
+ '.'.join((package, enum_value.name))] = file_desc_proto
+ for extension in file_desc_proto.extension:
+ self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto)
+ for service in file_desc_proto.service:
+ self._AddSymbol(('.'.join((package, service.name))), file_desc_proto)
+
+ def FindFileByName(self, name):
+ """Finds the file descriptor proto by file name.
+
+    Typically the file name is a relative path ending in a .proto file. The
+    proto with the given name must have been added to this database using the
+    Add method, or else an error will be raised.
+
+ Args:
+ name: The file name to find.
+
+ Returns:
+ The file descriptor proto matching the name.
+
+ Raises:
+ KeyError if no file by the given name was added.
+ """
+
+ return self._file_desc_protos_by_file[name]
+
+ def FindFileContainingSymbol(self, symbol):
+ """Finds the file descriptor proto containing the specified symbol.
+
+ The symbol should be a fully qualified name including the file descriptor's
+ package and any containing messages. Some examples:
+
+ 'some.package.name.Message'
+ 'some.package.name.Message.NestedEnum'
+ 'some.package.name.Message.some_field'
+
+ The file descriptor proto containing the specified symbol must be added to
+ this database using the Add method or else an error will be raised.
+
+ Args:
+ symbol: The fully qualified symbol name.
+
+ Returns:
+ The file descriptor proto containing the symbol.
+
+ Raises:
+ KeyError if no file contains the specified symbol.
+ """
+ try:
+ return self._file_desc_protos_by_symbol[symbol]
+ except KeyError:
+ # Fields, enum values, and nested extensions are not in
+ # _file_desc_protos_by_symbol. Try to find the top level
+ # descriptor. Non-existent nested symbol under a valid top level
+ # descriptor can also be found. The behavior is the same with
+ # protobuf C++.
+ top_level, _, _ = symbol.rpartition('.')
+ try:
+ return self._file_desc_protos_by_symbol[top_level]
+ except KeyError:
+ # Raise the original symbol as a KeyError for better diagnostics.
+ raise KeyError(symbol)
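+
+  # Example (illustrative; the file and symbol names are hypothetical):
+  #
+  #   db = DescriptorDatabase()
+  #   db.Add(file_desc_proto)  # a descriptor_pb2.FileDescriptorProto
+  #   db.FindFileByName('some/file.proto')             # -> file_desc_proto
+  #   db.FindFileContainingSymbol('some.package.Msg')  # -> file_desc_proto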
+
+ def FindFileContainingExtension(self, extendee_name, extension_number):
+ # TODO: implement this API.
+ return None
+
+ def FindAllExtensionNumbers(self, extendee_name):
+ # TODO: implement this API.
+ return []
+
+ def _AddSymbol(self, name, file_desc_proto):
+ if name in self._file_desc_protos_by_symbol:
+      warn_msg = ('Conflict while registering file "' + file_desc_proto.name +
+ '": ' + name +
+ ' is already defined in file "' +
+ self._file_desc_protos_by_symbol[name].name + '"')
+ warnings.warn(warn_msg, RuntimeWarning)
+ self._file_desc_protos_by_symbol[name] = file_desc_proto
+
+
+def _ExtractSymbols(desc_proto, package):
+ """Pulls out all the symbols from a descriptor proto.
+
+ Args:
+ desc_proto: The proto to extract symbols from.
+ package: The package containing the descriptor type.
+
+ Yields:
+ The fully qualified name found in the descriptor.
+ """
+ message_name = package + '.' + desc_proto.name if package else desc_proto.name
+ yield message_name
+ for nested_type in desc_proto.nested_type:
+ for symbol in _ExtractSymbols(nested_type, message_name):
+ yield symbol
+ for enum_type in desc_proto.enum_type:
+ yield '.'.join((message_name, enum_type.name))
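+
+# Illustrative yield order for _ExtractSymbols: for a message 'M' in package
+# 'p' containing a nested message 'N' and a nested enum 'E', the generator
+# yields 'p.M', then 'p.M.N', then 'p.M.E'.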
diff --git a/Lib/site-packages/google/protobuf/descriptor_pb2.py b/Lib/site-packages/google/protobuf/descriptor_pb2.py
new file mode 100644
index 0000000..d53d2e4
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/descriptor_pb2.py
@@ -0,0 +1,2790 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/descriptor.proto
+# Protobuf Python Version: 4.25.2
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+ DESCRIPTOR = _descriptor.FileDescriptor(
+ name='google/protobuf/descriptor.proto',
+ package='google.protobuf',
+ syntax='proto2',
+ serialized_options=b'\n\023com.google.protobufB\020DescriptorProtosH\001Z-google.golang.org/protobuf/types/descriptorpb\370\001\001\242\002\003GPB\252\002\032Google.Protobuf.Reflection',
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"M\n\x11\x46ileDescriptorSet\x12\x38\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProtoR\x04\x66ile\"\x98\x05\n\x13\x46ileDescriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x18\n\x07package\x18\x02 \x01(\tR\x07package\x12\x1e\n\ndependency\x18\x03 \x03(\tR\ndependency\x12+\n\x11public_dependency\x18\n \x03(\x05R\x10publicDependency\x12\'\n\x0fweak_dependency\x18\x0b \x03(\x05R\x0eweakDependency\x12\x43\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProtoR\x0bmessageType\x12\x41\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProtoR\x08\x65numType\x12\x41\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProtoR\x07service\x12\x43\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProtoR\textension\x12\x36\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptionsR\x07options\x12I\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfoR\x0esourceCodeInfo\x12\x16\n\x06syntax\x18\x0c \x01(\tR\x06syntax\x12\x32\n\x07\x65\x64ition\x18\x0e \x01(\x0e\x32\x18.google.protobuf.EditionR\x07\x65\x64ition\"\xb9\x06\n\x0f\x44\x65scriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12;\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProtoR\x05\x66ield\x12\x43\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProtoR\textension\x12\x41\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProtoR\nnestedType\x12\x41\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProtoR\x08\x65numType\x12X\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRangeR\x0e\x65xtensionRange\x12\x44\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProtoR\toneofDecl\x12\x39\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptionsR\x07options\x12U\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRangeR\rreservedRange\x12#\n\rreserved_name\x18\n \x03(\tR\x0creservedName\x1az\n\x0e\x45xtensionRange\x12\x14\n\x05start\x18\x01 \x01(\x05R\x05start\x12\x10\n\x03\x65nd\x18\x02 \x01(\x05R\x03\x65nd\x12@\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptionsR\x07options\x1a\x37\n\rReservedRange\x12\x14\n\x05start\x18\x01 \x01(\x05R\x05start\x12\x10\n\x03\x65nd\x18\x02 \x01(\x05R\x03\x65nd\"\xc7\x04\n\x15\x45xtensionRangeOptions\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption\x12Y\n\x0b\x64\x65\x63laration\x18\x02 \x03(\x0b\x32\x32.google.protobuf.ExtensionRangeOptions.DeclarationB\x03\x88\x01\x02R\x0b\x64\x65\x63laration\x12\x37\n\x08\x66\x65\x61tures\x18\x32 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12h\n\x0cverification\x18\x03 \x01(\x0e\x32\x38.google.protobuf.ExtensionRangeOptions.VerificationState:\nUNVERIFIEDR\x0cverification\x1a\x94\x01\n\x0b\x44\x65\x63laration\x12\x16\n\x06number\x18\x01 \x01(\x05R\x06number\x12\x1b\n\tfull_name\x18\x02 \x01(\tR\x08\x66ullName\x12\x12\n\x04type\x18\x03 \x01(\tR\x04type\x12\x1a\n\x08reserved\x18\x05 \x01(\x08R\x08reserved\x12\x1a\n\x08repeated\x18\x06 \x01(\x08R\x08repeatedJ\x04\x08\x04\x10\x05\"4\n\x11VerificationState\x12\x0f\n\x0b\x44\x45\x43LARATION\x10\x00\x12\x0e\n\nUNVERIFIED\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xc1\x06\n\x14\x46ieldDescriptorProto\x12\x12\n\x04name\x18\x01 
\x01(\tR\x04name\x12\x16\n\x06number\x18\x03 \x01(\x05R\x06number\x12\x41\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.LabelR\x05label\x12>\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.TypeR\x04type\x12\x1b\n\ttype_name\x18\x06 \x01(\tR\x08typeName\x12\x1a\n\x08\x65xtendee\x18\x02 \x01(\tR\x08\x65xtendee\x12#\n\rdefault_value\x18\x07 \x01(\tR\x0c\x64\x65\x66\x61ultValue\x12\x1f\n\x0boneof_index\x18\t \x01(\x05R\noneofIndex\x12\x1b\n\tjson_name\x18\n \x01(\tR\x08jsonName\x12\x37\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptionsR\x07options\x12\'\n\x0fproto3_optional\x18\x11 \x01(\x08R\x0eproto3Optional\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REPEATED\x10\x03\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\"c\n\x14OneofDescriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x37\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptionsR\x07options\"\xe3\x02\n\x13\x45numDescriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12?\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProtoR\x05value\x12\x36\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptionsR\x07options\x12]\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRangeR\rreservedRange\x12#\n\rreserved_name\x18\x05 \x03(\tR\x0creservedName\x1a;\n\x11\x45numReservedRange\x12\x14\n\x05start\x18\x01 \x01(\x05R\x05start\x12\x10\n\x03\x65nd\x18\x02 \x01(\x05R\x03\x65nd\"\x83\x01\n\x18\x45numValueDescriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x16\n\x06number\x18\x02 \x01(\x05R\x06number\x12;\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptionsR\x07options\"\xa7\x01\n\x16ServiceDescriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12>\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProtoR\x06method\x12\x39\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptionsR\x07options\"\x89\x02\n\x15MethodDescriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1d\n\ninput_type\x18\x02 \x01(\tR\tinputType\x12\x1f\n\x0boutput_type\x18\x03 \x01(\tR\noutputType\x12\x38\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptionsR\x07options\x12\x30\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lseR\x0f\x63lientStreaming\x12\x30\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lseR\x0fserverStreaming\"\xca\t\n\x0b\x46ileOptions\x12!\n\x0cjava_package\x18\x01 \x01(\tR\x0bjavaPackage\x12\x30\n\x14java_outer_classname\x18\x08 \x01(\tR\x12javaOuterClassname\x12\x35\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lseR\x11javaMultipleFiles\x12\x44\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01R\x19javaGenerateEqualsAndHash\x12:\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lseR\x13javaStringCheckUtf8\x12S\n\x0coptimize_for\x18\t 
\x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEEDR\x0boptimizeFor\x12\x1d\n\ngo_package\x18\x0b \x01(\tR\tgoPackage\x12\x35\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lseR\x11\x63\x63GenericServices\x12\x39\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lseR\x13javaGenericServices\x12\x35\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lseR\x11pyGenericServices\x12\x37\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lseR\x12phpGenericServices\x12%\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lseR\ndeprecated\x12.\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04trueR\x0e\x63\x63\x45nableArenas\x12*\n\x11objc_class_prefix\x18$ \x01(\tR\x0fobjcClassPrefix\x12)\n\x10\x63sharp_namespace\x18% \x01(\tR\x0f\x63sharpNamespace\x12!\n\x0cswift_prefix\x18\' \x01(\tR\x0bswiftPrefix\x12(\n\x10php_class_prefix\x18( \x01(\tR\x0ephpClassPrefix\x12#\n\rphp_namespace\x18) \x01(\tR\x0cphpNamespace\x12\x34\n\x16php_metadata_namespace\x18, \x01(\tR\x14phpMetadataNamespace\x12!\n\x0cruby_package\x18- \x01(\tR\x0brubyPackage\x12\x37\n\x08\x66\x65\x61tures\x18\x32 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\xf4\x03\n\x0eMessageOptions\x12<\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lseR\x14messageSetWireFormat\x12L\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lseR\x1cnoStandardDescriptorAccessor\x12%\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lseR\ndeprecated\x12\x1b\n\tmap_entry\x18\x07 \x01(\x08R\x08mapEntry\x12V\n&deprecated_legacy_json_field_conflicts\x18\x0b \x01(\x08\x42\x02\x18\x01R\"deprecatedLegacyJsonFieldConflicts\x12\x37\n\x08\x66\x65\x61tures\x18\x0c \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xad\n\n\x0c\x46ieldOptions\x12\x41\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRINGR\x05\x63type\x12\x16\n\x06packed\x18\x02 \x01(\x08R\x06packed\x12G\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMALR\x06jstype\x12\x19\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lseR\x04lazy\x12.\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lseR\x0eunverifiedLazy\x12%\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lseR\ndeprecated\x12\x19\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lseR\x04weak\x12(\n\x0c\x64\x65\x62ug_redact\x18\x10 \x01(\x08:\x05\x66\x61lseR\x0b\x64\x65\x62ugRedact\x12K\n\tretention\x18\x11 \x01(\x0e\x32-.google.protobuf.FieldOptions.OptionRetentionR\tretention\x12H\n\x07targets\x18\x13 \x03(\x0e\x32..google.protobuf.FieldOptions.OptionTargetTypeR\x07targets\x12W\n\x10\x65\x64ition_defaults\x18\x14 \x03(\x0b\x32,.google.protobuf.FieldOptions.EditionDefaultR\x0f\x65\x64itionDefaults\x12\x37\n\x08\x66\x65\x61tures\x18\x15 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption\x1aZ\n\x0e\x45\x64itionDefault\x12\x32\n\x07\x65\x64ition\x18\x03 
\x01(\x0e\x32\x18.google.protobuf.EditionR\x07\x65\x64ition\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02\"U\n\x0fOptionRetention\x12\x15\n\x11RETENTION_UNKNOWN\x10\x00\x12\x15\n\x11RETENTION_RUNTIME\x10\x01\x12\x14\n\x10RETENTION_SOURCE\x10\x02\"\x8c\x02\n\x10OptionTargetType\x12\x17\n\x13TARGET_TYPE_UNKNOWN\x10\x00\x12\x14\n\x10TARGET_TYPE_FILE\x10\x01\x12\x1f\n\x1bTARGET_TYPE_EXTENSION_RANGE\x10\x02\x12\x17\n\x13TARGET_TYPE_MESSAGE\x10\x03\x12\x15\n\x11TARGET_TYPE_FIELD\x10\x04\x12\x15\n\x11TARGET_TYPE_ONEOF\x10\x05\x12\x14\n\x10TARGET_TYPE_ENUM\x10\x06\x12\x1a\n\x16TARGET_TYPE_ENUM_ENTRY\x10\x07\x12\x17\n\x13TARGET_TYPE_SERVICE\x10\x08\x12\x16\n\x12TARGET_TYPE_METHOD\x10\t*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x12\x10\x13\"\xac\x01\n\x0cOneofOptions\x12\x37\n\x08\x66\x65\x61tures\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd1\x02\n\x0b\x45numOptions\x12\x1f\n\x0b\x61llow_alias\x18\x02 \x01(\x08R\nallowAlias\x12%\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lseR\ndeprecated\x12V\n&deprecated_legacy_json_field_conflicts\x18\x06 \x01(\x08\x42\x02\x18\x01R\"deprecatedLegacyJsonFieldConflicts\x12\x37\n\x08\x66\x65\x61tures\x18\x07 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"\x81\x02\n\x10\x45numValueOptions\x12%\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lseR\ndeprecated\x12\x37\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12(\n\x0c\x64\x65\x62ug_redact\x18\x03 \x01(\x08:\x05\x66\x61lseR\x0b\x64\x65\x62ugRedact\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x01\n\x0eServiceOptions\x12\x37\n\x08\x66\x65\x61tures\x18\" \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12%\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lseR\ndeprecated\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x99\x03\n\rMethodOptions\x12%\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lseR\ndeprecated\x12q\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWNR\x10idempotencyLevel\x12\x37\n\x08\x66\x65\x61tures\x18# \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9a\x03\n\x13UninterpretedOption\x12\x41\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePartR\x04name\x12)\n\x10identifier_value\x18\x03 \x01(\tR\x0fidentifierValue\x12,\n\x12positive_int_value\x18\x04 \x01(\x04R\x10positiveIntValue\x12,\n\x12negative_int_value\x18\x05 \x01(\x03R\x10negativeIntValue\x12!\n\x0c\x64ouble_value\x18\x06 \x01(\x01R\x0b\x64oubleValue\x12!\n\x0cstring_value\x18\x07 \x01(\x0cR\x0bstringValue\x12\'\n\x0f\x61ggregate_value\x18\x08 \x01(\tR\x0e\x61ggregateValue\x1aJ\n\x08NamePart\x12\x1b\n\tname_part\x18\x01 \x02(\tR\x08namePart\x12!\n\x0cis_extension\x18\x02 \x02(\x08R\x0bisExtension\"\xfc\t\n\nFeatureSet\x12\x8b\x01\n\x0e\x66ield_presence\x18\x01 \x01(\x0e\x32).google.protobuf.FeatureSet.FieldPresenceB9\x88\x01\x01\x98\x01\x04\x98\x01\x01\xa2\x01\r\x12\x08\x45XPLICIT\x18\xe6\x07\xa2\x01\r\x12\x08IMPLICIT\x18\xe7\x07\xa2\x01\r\x12\x08\x45XPLICIT\x18\xe8\x07R\rfieldPresence\x12\x66\n\tenum_type\x18\x02 \x01(\x0e\x32$.google.protobuf.FeatureSet.EnumTypeB#\x88\x01\x01\x98\x01\x06\x98\x01\x01\xa2\x01\x0b\x12\x06\x43LOSED\x18\xe6\x07\xa2\x01\t\x12\x04OPEN\x18\xe7\x07R\x08\x65numType\x12\x92\x01\n\x17repeated_field_encoding\x18\x03 \x01(\x0e\x32\x31.google.protobuf.FeatureSet.RepeatedFieldEncodingB\'\x88\x01\x01\x98\x01\x04\x98\x01\x01\xa2\x01\r\x12\x08\x45XPANDED\x18\xe6\x07\xa2\x01\x0b\x12\x06PACKED\x18\xe7\x07R\x15repeatedFieldEncoding\x12x\n\x0futf8_validation\x18\x04 \x01(\x0e\x32*.google.protobuf.FeatureSet.Utf8ValidationB#\x88\x01\x01\x98\x01\x04\x98\x01\x01\xa2\x01\t\x12\x04NONE\x18\xe6\x07\xa2\x01\x0b\x12\x06VERIFY\x18\xe7\x07R\x0eutf8Validation\x12x\n\x10message_encoding\x18\x05 \x01(\x0e\x32+.google.protobuf.FeatureSet.MessageEncodingB \x88\x01\x01\x98\x01\x04\x98\x01\x01\xa2\x01\x14\x12\x0fLENGTH_PREFIXED\x18\xe6\x07R\x0fmessageEncoding\x12|\n\x0bjson_format\x18\x06 
\x01(\x0e\x32&.google.protobuf.FeatureSet.JsonFormatB3\x88\x01\x01\x98\x01\x03\x98\x01\x06\x98\x01\x01\xa2\x01\x17\x12\x12LEGACY_BEST_EFFORT\x18\xe6\x07\xa2\x01\n\x12\x05\x41LLOW\x18\xe7\x07R\njsonFormat\"\\\n\rFieldPresence\x12\x1a\n\x16\x46IELD_PRESENCE_UNKNOWN\x10\x00\x12\x0c\n\x08\x45XPLICIT\x10\x01\x12\x0c\n\x08IMPLICIT\x10\x02\x12\x13\n\x0fLEGACY_REQUIRED\x10\x03\"7\n\x08\x45numType\x12\x15\n\x11\x45NUM_TYPE_UNKNOWN\x10\x00\x12\x08\n\x04OPEN\x10\x01\x12\n\n\x06\x43LOSED\x10\x02\"V\n\x15RepeatedFieldEncoding\x12#\n\x1fREPEATED_FIELD_ENCODING_UNKNOWN\x10\x00\x12\n\n\x06PACKED\x10\x01\x12\x0c\n\x08\x45XPANDED\x10\x02\"C\n\x0eUtf8Validation\x12\x1b\n\x17UTF8_VALIDATION_UNKNOWN\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\n\n\x06VERIFY\x10\x02\"S\n\x0fMessageEncoding\x12\x1c\n\x18MESSAGE_ENCODING_UNKNOWN\x10\x00\x12\x13\n\x0fLENGTH_PREFIXED\x10\x01\x12\r\n\tDELIMITED\x10\x02\"H\n\nJsonFormat\x12\x17\n\x13JSON_FORMAT_UNKNOWN\x10\x00\x12\t\n\x05\x41LLOW\x10\x01\x12\x16\n\x12LEGACY_BEST_EFFORT\x10\x02*\x06\x08\xe8\x07\x10\xe9\x07*\x06\x08\xe9\x07\x10\xea\x07*\x06\x08\x8bN\x10\x90NJ\x06\x08\xe7\x07\x10\xe8\x07\"\xfe\x02\n\x12\x46\x65\x61tureSetDefaults\x12X\n\x08\x64\x65\x66\x61ults\x18\x01 \x03(\x0b\x32<.google.protobuf.FeatureSetDefaults.FeatureSetEditionDefaultR\x08\x64\x65\x66\x61ults\x12\x41\n\x0fminimum_edition\x18\x04 \x01(\x0e\x32\x18.google.protobuf.EditionR\x0eminimumEdition\x12\x41\n\x0fmaximum_edition\x18\x05 \x01(\x0e\x32\x18.google.protobuf.EditionR\x0emaximumEdition\x1a\x87\x01\n\x18\x46\x65\x61tureSetEditionDefault\x12\x32\n\x07\x65\x64ition\x18\x03 \x01(\x0e\x32\x18.google.protobuf.EditionR\x07\x65\x64ition\x12\x37\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\"\xa7\x02\n\x0eSourceCodeInfo\x12\x44\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.LocationR\x08location\x1a\xce\x01\n\x08Location\x12\x16\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01R\x04path\x12\x16\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01R\x04span\x12)\n\x10leading_comments\x18\x03 \x01(\tR\x0fleadingComments\x12+\n\x11trailing_comments\x18\x04 \x01(\tR\x10trailingComments\x12:\n\x19leading_detached_comments\x18\x06 \x03(\tR\x17leadingDetachedComments\"\xd0\x02\n\x11GeneratedCodeInfo\x12M\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.AnnotationR\nannotation\x1a\xeb\x01\n\nAnnotation\x12\x16\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01R\x04path\x12\x1f\n\x0bsource_file\x18\x02 \x01(\tR\nsourceFile\x12\x14\n\x05\x62\x65gin\x18\x03 \x01(\x05R\x05\x62\x65gin\x12\x10\n\x03\x65nd\x18\x04 \x01(\x05R\x03\x65nd\x12R\n\x08semantic\x18\x05 \x01(\x0e\x32\x36.google.protobuf.GeneratedCodeInfo.Annotation.SemanticR\x08semantic\"(\n\x08Semantic\x12\x08\n\x04NONE\x10\x00\x12\x07\n\x03SET\x10\x01\x12\t\n\x05\x41LIAS\x10\x02*\xea\x01\n\x07\x45\x64ition\x12\x13\n\x0f\x45\x44ITION_UNKNOWN\x10\x00\x12\x13\n\x0e\x45\x44ITION_PROTO2\x10\xe6\x07\x12\x13\n\x0e\x45\x44ITION_PROTO3\x10\xe7\x07\x12\x11\n\x0c\x45\x44ITION_2023\x10\xe8\x07\x12\x17\n\x13\x45\x44ITION_1_TEST_ONLY\x10\x01\x12\x17\n\x13\x45\x44ITION_2_TEST_ONLY\x10\x02\x12\x1d\n\x17\x45\x44ITION_99997_TEST_ONLY\x10\x9d\x8d\x06\x12\x1d\n\x17\x45\x44ITION_99998_TEST_ONLY\x10\x9e\x8d\x06\x12\x1d\n\x17\x45\x44ITION_99999_TEST_ONLY\x10\x9f\x8d\x06\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection'
+ )
+else:
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"M\n\x11\x46ileDescriptorSet\x12\x38\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProtoR\x04\x66ile\"\x98\x05\n\x13\x46ileDescriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x18\n\x07package\x18\x02 \x01(\tR\x07package\x12\x1e\n\ndependency\x18\x03 \x03(\tR\ndependency\x12+\n\x11public_dependency\x18\n \x03(\x05R\x10publicDependency\x12\'\n\x0fweak_dependency\x18\x0b \x03(\x05R\x0eweakDependency\x12\x43\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProtoR\x0bmessageType\x12\x41\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProtoR\x08\x65numType\x12\x41\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProtoR\x07service\x12\x43\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProtoR\textension\x12\x36\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptionsR\x07options\x12I\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfoR\x0esourceCodeInfo\x12\x16\n\x06syntax\x18\x0c \x01(\tR\x06syntax\x12\x32\n\x07\x65\x64ition\x18\x0e \x01(\x0e\x32\x18.google.protobuf.EditionR\x07\x65\x64ition\"\xb9\x06\n\x0f\x44\x65scriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12;\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProtoR\x05\x66ield\x12\x43\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProtoR\textension\x12\x41\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProtoR\nnestedType\x12\x41\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProtoR\x08\x65numType\x12X\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRangeR\x0e\x65xtensionRange\x12\x44\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProtoR\toneofDecl\x12\x39\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptionsR\x07options\x12U\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRangeR\rreservedRange\x12#\n\rreserved_name\x18\n \x03(\tR\x0creservedName\x1az\n\x0e\x45xtensionRange\x12\x14\n\x05start\x18\x01 \x01(\x05R\x05start\x12\x10\n\x03\x65nd\x18\x02 \x01(\x05R\x03\x65nd\x12@\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptionsR\x07options\x1a\x37\n\rReservedRange\x12\x14\n\x05start\x18\x01 \x01(\x05R\x05start\x12\x10\n\x03\x65nd\x18\x02 \x01(\x05R\x03\x65nd\"\xc7\x04\n\x15\x45xtensionRangeOptions\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption\x12Y\n\x0b\x64\x65\x63laration\x18\x02 \x03(\x0b\x32\x32.google.protobuf.ExtensionRangeOptions.DeclarationB\x03\x88\x01\x02R\x0b\x64\x65\x63laration\x12\x37\n\x08\x66\x65\x61tures\x18\x32 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12h\n\x0cverification\x18\x03 \x01(\x0e\x32\x38.google.protobuf.ExtensionRangeOptions.VerificationState:\nUNVERIFIEDR\x0cverification\x1a\x94\x01\n\x0b\x44\x65\x63laration\x12\x16\n\x06number\x18\x01 \x01(\x05R\x06number\x12\x1b\n\tfull_name\x18\x02 \x01(\tR\x08\x66ullName\x12\x12\n\x04type\x18\x03 \x01(\tR\x04type\x12\x1a\n\x08reserved\x18\x05 \x01(\x08R\x08reserved\x12\x1a\n\x08repeated\x18\x06 
\x01(\x08R\x08repeatedJ\x04\x08\x04\x10\x05\"4\n\x11VerificationState\x12\x0f\n\x0b\x44\x45\x43LARATION\x10\x00\x12\x0e\n\nUNVERIFIED\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xc1\x06\n\x14\x46ieldDescriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x16\n\x06number\x18\x03 \x01(\x05R\x06number\x12\x41\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.LabelR\x05label\x12>\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.TypeR\x04type\x12\x1b\n\ttype_name\x18\x06 \x01(\tR\x08typeName\x12\x1a\n\x08\x65xtendee\x18\x02 \x01(\tR\x08\x65xtendee\x12#\n\rdefault_value\x18\x07 \x01(\tR\x0c\x64\x65\x66\x61ultValue\x12\x1f\n\x0boneof_index\x18\t \x01(\x05R\noneofIndex\x12\x1b\n\tjson_name\x18\n \x01(\tR\x08jsonName\x12\x37\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptionsR\x07options\x12\'\n\x0fproto3_optional\x18\x11 \x01(\x08R\x0eproto3Optional\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REPEATED\x10\x03\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\"c\n\x14OneofDescriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x37\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptionsR\x07options\"\xe3\x02\n\x13\x45numDescriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12?\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProtoR\x05value\x12\x36\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptionsR\x07options\x12]\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRangeR\rreservedRange\x12#\n\rreserved_name\x18\x05 \x03(\tR\x0creservedName\x1a;\n\x11\x45numReservedRange\x12\x14\n\x05start\x18\x01 \x01(\x05R\x05start\x12\x10\n\x03\x65nd\x18\x02 \x01(\x05R\x03\x65nd\"\x83\x01\n\x18\x45numValueDescriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x16\n\x06number\x18\x02 \x01(\x05R\x06number\x12;\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptionsR\x07options\"\xa7\x01\n\x16ServiceDescriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12>\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProtoR\x06method\x12\x39\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptionsR\x07options\"\x89\x02\n\x15MethodDescriptorProto\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1d\n\ninput_type\x18\x02 \x01(\tR\tinputType\x12\x1f\n\x0boutput_type\x18\x03 \x01(\tR\noutputType\x12\x38\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptionsR\x07options\x12\x30\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lseR\x0f\x63lientStreaming\x12\x30\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lseR\x0fserverStreaming\"\xca\t\n\x0b\x46ileOptions\x12!\n\x0cjava_package\x18\x01 \x01(\tR\x0bjavaPackage\x12\x30\n\x14java_outer_classname\x18\x08 \x01(\tR\x12javaOuterClassname\x12\x35\n\x13java_multiple_files\x18\n 
\x01(\x08:\x05\x66\x61lseR\x11javaMultipleFiles\x12\x44\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01R\x19javaGenerateEqualsAndHash\x12:\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lseR\x13javaStringCheckUtf8\x12S\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEEDR\x0boptimizeFor\x12\x1d\n\ngo_package\x18\x0b \x01(\tR\tgoPackage\x12\x35\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lseR\x11\x63\x63GenericServices\x12\x39\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lseR\x13javaGenericServices\x12\x35\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lseR\x11pyGenericServices\x12\x37\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lseR\x12phpGenericServices\x12%\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lseR\ndeprecated\x12.\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04trueR\x0e\x63\x63\x45nableArenas\x12*\n\x11objc_class_prefix\x18$ \x01(\tR\x0fobjcClassPrefix\x12)\n\x10\x63sharp_namespace\x18% \x01(\tR\x0f\x63sharpNamespace\x12!\n\x0cswift_prefix\x18\' \x01(\tR\x0bswiftPrefix\x12(\n\x10php_class_prefix\x18( \x01(\tR\x0ephpClassPrefix\x12#\n\rphp_namespace\x18) \x01(\tR\x0cphpNamespace\x12\x34\n\x16php_metadata_namespace\x18, \x01(\tR\x14phpMetadataNamespace\x12!\n\x0cruby_package\x18- \x01(\tR\x0brubyPackage\x12\x37\n\x08\x66\x65\x61tures\x18\x32 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\xf4\x03\n\x0eMessageOptions\x12<\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lseR\x14messageSetWireFormat\x12L\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lseR\x1cnoStandardDescriptorAccessor\x12%\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lseR\ndeprecated\x12\x1b\n\tmap_entry\x18\x07 \x01(\x08R\x08mapEntry\x12V\n&deprecated_legacy_json_field_conflicts\x18\x0b \x01(\x08\x42\x02\x18\x01R\"deprecatedLegacyJsonFieldConflicts\x12\x37\n\x08\x66\x65\x61tures\x18\x0c \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xad\n\n\x0c\x46ieldOptions\x12\x41\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRINGR\x05\x63type\x12\x16\n\x06packed\x18\x02 \x01(\x08R\x06packed\x12G\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMALR\x06jstype\x12\x19\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lseR\x04lazy\x12.\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lseR\x0eunverifiedLazy\x12%\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lseR\ndeprecated\x12\x19\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lseR\x04weak\x12(\n\x0c\x64\x65\x62ug_redact\x18\x10 \x01(\x08:\x05\x66\x61lseR\x0b\x64\x65\x62ugRedact\x12K\n\tretention\x18\x11 \x01(\x0e\x32-.google.protobuf.FieldOptions.OptionRetentionR\tretention\x12H\n\x07targets\x18\x13 \x03(\x0e\x32..google.protobuf.FieldOptions.OptionTargetTypeR\x07targets\x12W\n\x10\x65\x64ition_defaults\x18\x14 
\x03(\x0b\x32,.google.protobuf.FieldOptions.EditionDefaultR\x0f\x65\x64itionDefaults\x12\x37\n\x08\x66\x65\x61tures\x18\x15 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption\x1aZ\n\x0e\x45\x64itionDefault\x12\x32\n\x07\x65\x64ition\x18\x03 \x01(\x0e\x32\x18.google.protobuf.EditionR\x07\x65\x64ition\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02\"U\n\x0fOptionRetention\x12\x15\n\x11RETENTION_UNKNOWN\x10\x00\x12\x15\n\x11RETENTION_RUNTIME\x10\x01\x12\x14\n\x10RETENTION_SOURCE\x10\x02\"\x8c\x02\n\x10OptionTargetType\x12\x17\n\x13TARGET_TYPE_UNKNOWN\x10\x00\x12\x14\n\x10TARGET_TYPE_FILE\x10\x01\x12\x1f\n\x1bTARGET_TYPE_EXTENSION_RANGE\x10\x02\x12\x17\n\x13TARGET_TYPE_MESSAGE\x10\x03\x12\x15\n\x11TARGET_TYPE_FIELD\x10\x04\x12\x15\n\x11TARGET_TYPE_ONEOF\x10\x05\x12\x14\n\x10TARGET_TYPE_ENUM\x10\x06\x12\x1a\n\x16TARGET_TYPE_ENUM_ENTRY\x10\x07\x12\x17\n\x13TARGET_TYPE_SERVICE\x10\x08\x12\x16\n\x12TARGET_TYPE_METHOD\x10\t*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x12\x10\x13\"\xac\x01\n\x0cOneofOptions\x12\x37\n\x08\x66\x65\x61tures\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd1\x02\n\x0b\x45numOptions\x12\x1f\n\x0b\x61llow_alias\x18\x02 \x01(\x08R\nallowAlias\x12%\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lseR\ndeprecated\x12V\n&deprecated_legacy_json_field_conflicts\x18\x06 \x01(\x08\x42\x02\x18\x01R\"deprecatedLegacyJsonFieldConflicts\x12\x37\n\x08\x66\x65\x61tures\x18\x07 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"\x81\x02\n\x10\x45numValueOptions\x12%\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lseR\ndeprecated\x12\x37\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12(\n\x0c\x64\x65\x62ug_redact\x18\x03 \x01(\x08:\x05\x66\x61lseR\x0b\x64\x65\x62ugRedact\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x01\n\x0eServiceOptions\x12\x37\n\x08\x66\x65\x61tures\x18\" \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12%\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lseR\ndeprecated\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x99\x03\n\rMethodOptions\x12%\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lseR\ndeprecated\x12q\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWNR\x10idempotencyLevel\x12\x37\n\x08\x66\x65\x61tures\x18# \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\x12X\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9a\x03\n\x13UninterpretedOption\x12\x41\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePartR\x04name\x12)\n\x10identifier_value\x18\x03 \x01(\tR\x0fidentifierValue\x12,\n\x12positive_int_value\x18\x04 \x01(\x04R\x10positiveIntValue\x12,\n\x12negative_int_value\x18\x05 \x01(\x03R\x10negativeIntValue\x12!\n\x0c\x64ouble_value\x18\x06 \x01(\x01R\x0b\x64oubleValue\x12!\n\x0cstring_value\x18\x07 \x01(\x0cR\x0bstringValue\x12\'\n\x0f\x61ggregate_value\x18\x08 \x01(\tR\x0e\x61ggregateValue\x1aJ\n\x08NamePart\x12\x1b\n\tname_part\x18\x01 \x02(\tR\x08namePart\x12!\n\x0cis_extension\x18\x02 \x02(\x08R\x0bisExtension\"\xfc\t\n\nFeatureSet\x12\x8b\x01\n\x0e\x66ield_presence\x18\x01 \x01(\x0e\x32).google.protobuf.FeatureSet.FieldPresenceB9\x88\x01\x01\x98\x01\x04\x98\x01\x01\xa2\x01\r\x12\x08\x45XPLICIT\x18\xe6\x07\xa2\x01\r\x12\x08IMPLICIT\x18\xe7\x07\xa2\x01\r\x12\x08\x45XPLICIT\x18\xe8\x07R\rfieldPresence\x12\x66\n\tenum_type\x18\x02 \x01(\x0e\x32$.google.protobuf.FeatureSet.EnumTypeB#\x88\x01\x01\x98\x01\x06\x98\x01\x01\xa2\x01\x0b\x12\x06\x43LOSED\x18\xe6\x07\xa2\x01\t\x12\x04OPEN\x18\xe7\x07R\x08\x65numType\x12\x92\x01\n\x17repeated_field_encoding\x18\x03 \x01(\x0e\x32\x31.google.protobuf.FeatureSet.RepeatedFieldEncodingB\'\x88\x01\x01\x98\x01\x04\x98\x01\x01\xa2\x01\r\x12\x08\x45XPANDED\x18\xe6\x07\xa2\x01\x0b\x12\x06PACKED\x18\xe7\x07R\x15repeatedFieldEncoding\x12x\n\x0futf8_validation\x18\x04 \x01(\x0e\x32*.google.protobuf.FeatureSet.Utf8ValidationB#\x88\x01\x01\x98\x01\x04\x98\x01\x01\xa2\x01\t\x12\x04NONE\x18\xe6\x07\xa2\x01\x0b\x12\x06VERIFY\x18\xe7\x07R\x0eutf8Validation\x12x\n\x10message_encoding\x18\x05 \x01(\x0e\x32+.google.protobuf.FeatureSet.MessageEncodingB \x88\x01\x01\x98\x01\x04\x98\x01\x01\xa2\x01\x14\x12\x0fLENGTH_PREFIXED\x18\xe6\x07R\x0fmessageEncoding\x12|\n\x0bjson_format\x18\x06 
\x01(\x0e\x32&.google.protobuf.FeatureSet.JsonFormatB3\x88\x01\x01\x98\x01\x03\x98\x01\x06\x98\x01\x01\xa2\x01\x17\x12\x12LEGACY_BEST_EFFORT\x18\xe6\x07\xa2\x01\n\x12\x05\x41LLOW\x18\xe7\x07R\njsonFormat\"\\\n\rFieldPresence\x12\x1a\n\x16\x46IELD_PRESENCE_UNKNOWN\x10\x00\x12\x0c\n\x08\x45XPLICIT\x10\x01\x12\x0c\n\x08IMPLICIT\x10\x02\x12\x13\n\x0fLEGACY_REQUIRED\x10\x03\"7\n\x08\x45numType\x12\x15\n\x11\x45NUM_TYPE_UNKNOWN\x10\x00\x12\x08\n\x04OPEN\x10\x01\x12\n\n\x06\x43LOSED\x10\x02\"V\n\x15RepeatedFieldEncoding\x12#\n\x1fREPEATED_FIELD_ENCODING_UNKNOWN\x10\x00\x12\n\n\x06PACKED\x10\x01\x12\x0c\n\x08\x45XPANDED\x10\x02\"C\n\x0eUtf8Validation\x12\x1b\n\x17UTF8_VALIDATION_UNKNOWN\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\n\n\x06VERIFY\x10\x02\"S\n\x0fMessageEncoding\x12\x1c\n\x18MESSAGE_ENCODING_UNKNOWN\x10\x00\x12\x13\n\x0fLENGTH_PREFIXED\x10\x01\x12\r\n\tDELIMITED\x10\x02\"H\n\nJsonFormat\x12\x17\n\x13JSON_FORMAT_UNKNOWN\x10\x00\x12\t\n\x05\x41LLOW\x10\x01\x12\x16\n\x12LEGACY_BEST_EFFORT\x10\x02*\x06\x08\xe8\x07\x10\xe9\x07*\x06\x08\xe9\x07\x10\xea\x07*\x06\x08\x8bN\x10\x90NJ\x06\x08\xe7\x07\x10\xe8\x07\"\xfe\x02\n\x12\x46\x65\x61tureSetDefaults\x12X\n\x08\x64\x65\x66\x61ults\x18\x01 \x03(\x0b\x32<.google.protobuf.FeatureSetDefaults.FeatureSetEditionDefaultR\x08\x64\x65\x66\x61ults\x12\x41\n\x0fminimum_edition\x18\x04 \x01(\x0e\x32\x18.google.protobuf.EditionR\x0eminimumEdition\x12\x41\n\x0fmaximum_edition\x18\x05 \x01(\x0e\x32\x18.google.protobuf.EditionR\x0emaximumEdition\x1a\x87\x01\n\x18\x46\x65\x61tureSetEditionDefault\x12\x32\n\x07\x65\x64ition\x18\x03 \x01(\x0e\x32\x18.google.protobuf.EditionR\x07\x65\x64ition\x12\x37\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.FeatureSetR\x08\x66\x65\x61tures\"\xa7\x02\n\x0eSourceCodeInfo\x12\x44\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.LocationR\x08location\x1a\xce\x01\n\x08Location\x12\x16\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01R\x04path\x12\x16\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01R\x04span\x12)\n\x10leading_comments\x18\x03 \x01(\tR\x0fleadingComments\x12+\n\x11trailing_comments\x18\x04 \x01(\tR\x10trailingComments\x12:\n\x19leading_detached_comments\x18\x06 \x03(\tR\x17leadingDetachedComments\"\xd0\x02\n\x11GeneratedCodeInfo\x12M\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.AnnotationR\nannotation\x1a\xeb\x01\n\nAnnotation\x12\x16\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01R\x04path\x12\x1f\n\x0bsource_file\x18\x02 \x01(\tR\nsourceFile\x12\x14\n\x05\x62\x65gin\x18\x03 \x01(\x05R\x05\x62\x65gin\x12\x10\n\x03\x65nd\x18\x04 \x01(\x05R\x03\x65nd\x12R\n\x08semantic\x18\x05 \x01(\x0e\x32\x36.google.protobuf.GeneratedCodeInfo.Annotation.SemanticR\x08semantic\"(\n\x08Semantic\x12\x08\n\x04NONE\x10\x00\x12\x07\n\x03SET\x10\x01\x12\t\n\x05\x41LIAS\x10\x02*\xea\x01\n\x07\x45\x64ition\x12\x13\n\x0f\x45\x44ITION_UNKNOWN\x10\x00\x12\x13\n\x0e\x45\x44ITION_PROTO2\x10\xe6\x07\x12\x13\n\x0e\x45\x44ITION_PROTO3\x10\xe7\x07\x12\x11\n\x0c\x45\x44ITION_2023\x10\xe8\x07\x12\x17\n\x13\x45\x44ITION_1_TEST_ONLY\x10\x01\x12\x17\n\x13\x45\x44ITION_2_TEST_ONLY\x10\x02\x12\x1d\n\x17\x45\x44ITION_99997_TEST_ONLY\x10\x9d\x8d\x06\x12\x1d\n\x17\x45\x44ITION_99998_TEST_ONLY\x10\x9e\x8d\x06\x12\x1d\n\x17\x45\x44ITION_99999_TEST_ONLY\x10\x9f\x8d\x06\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection')
+
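+# AddSerializedFile hands the default descriptor pool a serialized
+# FileDescriptorProto for descriptor.proto itself and returns the resulting
+# FileDescriptor. The C-descriptor path above builds the same FileDescriptor
+# directly from serialized_pb.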
+_globals = globals()
+if not _descriptor._USE_C_DESCRIPTORS:
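+ # Pure-Python fallback: rebuild every enum and message descriptor by hand,
+ # registering each with the default symbol database (_sym_db) so reflection
+ # lookups behave the same as with the C descriptor implementation.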
+ _EDITION = _descriptor.EnumDescriptor(
+ name='Edition',
+ full_name='google.protobuf.Edition',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='EDITION_UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='EDITION_PROTO2', index=1, number=998,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='EDITION_PROTO3', index=2, number=999,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='EDITION_2023', index=3, number=1000,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='EDITION_1_TEST_ONLY', index=4, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='EDITION_2_TEST_ONLY', index=5, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='EDITION_99997_TEST_ONLY', index=6, number=99997,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='EDITION_99998_TEST_ONLY', index=7, number=99998,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='EDITION_99999_TEST_ONLY', index=8, number=99999,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_EDITION)
+
+ _EXTENSIONRANGEOPTIONS_VERIFICATIONSTATE = _descriptor.EnumDescriptor(
+ name='VerificationState',
+ full_name='google.protobuf.ExtensionRangeOptions.VerificationState',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='DECLARATION', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='UNVERIFIED', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_EXTENSIONRANGEOPTIONS_VERIFICATIONSTATE)
+
+ _FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='google.protobuf.FieldDescriptorProto.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_DOUBLE', index=0, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_FLOAT', index=1, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_INT64', index=2, number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_UINT64', index=3, number=4,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_INT32', index=4, number=5,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_FIXED64', index=5, number=6,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_FIXED32', index=6, number=7,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_BOOL', index=7, number=8,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_STRING', index=8, number=9,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_GROUP', index=9, number=10,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_MESSAGE', index=10, number=11,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_BYTES', index=11, number=12,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_UINT32', index=12, number=13,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_ENUM', index=13, number=14,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_SFIXED32', index=14, number=15,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_SFIXED64', index=15, number=16,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_SINT32', index=16, number=17,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_SINT64', index=17, number=18,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE)
+
+ _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor(
+ name='Label',
+ full_name='google.protobuf.FieldDescriptorProto.Label',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='LABEL_OPTIONAL', index=0, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='LABEL_REPEATED', index=1, number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='LABEL_REQUIRED', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL)
+
+ _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor(
+ name='OptimizeMode',
+ full_name='google.protobuf.FileOptions.OptimizeMode',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='SPEED', index=0, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='CODE_SIZE', index=1, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='LITE_RUNTIME', index=2, number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE)
+
+ _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor(
+ name='CType',
+ full_name='google.protobuf.FieldOptions.CType',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='STRING', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='CORD', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='STRING_PIECE', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE)
+
+ _FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor(
+ name='JSType',
+ full_name='google.protobuf.FieldOptions.JSType',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='JS_NORMAL', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='JS_STRING', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='JS_NUMBER', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE)
+
+ _FIELDOPTIONS_OPTIONRETENTION = _descriptor.EnumDescriptor(
+ name='OptionRetention',
+ full_name='google.protobuf.FieldOptions.OptionRetention',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='RETENTION_UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='RETENTION_RUNTIME', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='RETENTION_SOURCE', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_OPTIONRETENTION)
+
+ _FIELDOPTIONS_OPTIONTARGETTYPE = _descriptor.EnumDescriptor(
+ name='OptionTargetType',
+ full_name='google.protobuf.FieldOptions.OptionTargetType',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='TARGET_TYPE_UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TARGET_TYPE_FILE', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TARGET_TYPE_EXTENSION_RANGE', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TARGET_TYPE_MESSAGE', index=3, number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TARGET_TYPE_FIELD', index=4, number=4,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TARGET_TYPE_ONEOF', index=5, number=5,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TARGET_TYPE_ENUM', index=6, number=6,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TARGET_TYPE_ENUM_ENTRY', index=7, number=7,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TARGET_TYPE_SERVICE', index=8, number=8,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TARGET_TYPE_METHOD', index=9, number=9,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_OPTIONTARGETTYPE)
+
+ _METHODOPTIONS_IDEMPOTENCYLEVEL = _descriptor.EnumDescriptor(
+ name='IdempotencyLevel',
+ full_name='google.protobuf.MethodOptions.IdempotencyLevel',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='IDEMPOTENCY_UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='NO_SIDE_EFFECTS', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='IDEMPOTENT', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_IDEMPOTENCYLEVEL)
+
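+ # The FeatureSet enums below back Protobuf Editions: they mirror the
+ # feature enums in descriptor.proto that drive per-edition default
+ # resolution.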
+ _FEATURESET_FIELDPRESENCE = _descriptor.EnumDescriptor(
+ name='FieldPresence',
+ full_name='google.protobuf.FeatureSet.FieldPresence',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='FIELD_PRESENCE_UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='EXPLICIT', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='IMPLICIT', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='LEGACY_REQUIRED', index=3, number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FEATURESET_FIELDPRESENCE)
+
+ _FEATURESET_ENUMTYPE = _descriptor.EnumDescriptor(
+ name='EnumType',
+ full_name='google.protobuf.FeatureSet.EnumType',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='ENUM_TYPE_UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='OPEN', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='CLOSED', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FEATURESET_ENUMTYPE)
+
+ _FEATURESET_REPEATEDFIELDENCODING = _descriptor.EnumDescriptor(
+ name='RepeatedFieldEncoding',
+ full_name='google.protobuf.FeatureSet.RepeatedFieldEncoding',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='REPEATED_FIELD_ENCODING_UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='PACKED', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='EXPANDED', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FEATURESET_REPEATEDFIELDENCODING)
+
+ _FEATURESET_UTF8VALIDATION = _descriptor.EnumDescriptor(
+ name='Utf8Validation',
+ full_name='google.protobuf.FeatureSet.Utf8Validation',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='UTF8_VALIDATION_UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='NONE', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='VERIFY', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FEATURESET_UTF8VALIDATION)
+
+ _FEATURESET_MESSAGEENCODING = _descriptor.EnumDescriptor(
+ name='MessageEncoding',
+ full_name='google.protobuf.FeatureSet.MessageEncoding',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='MESSAGE_ENCODING_UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='LENGTH_PREFIXED', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='DELIMITED', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FEATURESET_MESSAGEENCODING)
+
+ _FEATURESET_JSONFORMAT = _descriptor.EnumDescriptor(
+ name='JsonFormat',
+ full_name='google.protobuf.FeatureSet.JsonFormat',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='JSON_FORMAT_UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='ALLOW', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='LEGACY_BEST_EFFORT', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FEATURESET_JSONFORMAT)
+
+ _GENERATEDCODEINFO_ANNOTATION_SEMANTIC = _descriptor.EnumDescriptor(
+ name='Semantic',
+ full_name='google.protobuf.GeneratedCodeInfo.Annotation.Semantic',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='NONE', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='SET', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='ALIAS', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_GENERATEDCODEINFO_ANNOTATION_SEMANTIC)
+
+
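+ # Message descriptors follow. Each Descriptor mirrors one message in
+ # descriptor.proto; its FieldDescriptor entries carry the field number,
+ # numeric type/label codes, and the camelCase JSON name.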
+ _FILEDESCRIPTORSET = _descriptor.Descriptor(
+ name='FileDescriptorSet',
+ full_name='google.protobuf.FileDescriptorSet',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
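+ # type=11 (TYPE_MESSAGE), cpp_type=10 (CPPTYPE_MESSAGE) and label=3
+ # (LABEL_REPEATED) encode `repeated FileDescriptorProto file = 1;`.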
+ _descriptor.FieldDescriptor(
+ name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='file', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _FILEDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='FileDescriptorProto',
+ full_name='google.protobuf.FileDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
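+ # type=9 (TYPE_STRING), cpp_type=9 (CPPTYPE_STRING) and label=1
+ # (LABEL_OPTIONAL) encode `optional string name = 1;`.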
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='package', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2,
+ number=3, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='dependency', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3,
+ number=10, type=5, cpp_type=1, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='publicDependency', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4,
+ number=11, type=5, cpp_type=1, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='weakDependency', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='messageType', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6,
+ number=5, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='enumType', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7,
+ number=6, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='service', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8,
+ number=7, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='extension', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9,
+ number=8, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='options', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10,
+ number=9, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='sourceCodeInfo', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11,
+ number=12, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='syntax', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
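+ # edition is an enum field: type=14 (TYPE_ENUM), cpp_type=8 (CPPTYPE_ENUM);
+ # its default_value=0 corresponds to EDITION_UNKNOWN.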
+ _descriptor.FieldDescriptor(
+ name='edition', full_name='google.protobuf.FileDescriptorProto.edition', index=12,
+ number=14, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='edition', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor(
+ name='ExtensionRange',
+ full_name='google.protobuf.DescriptorProto.ExtensionRange',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0,
+ number=1, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='start', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1,
+ number=2, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='end', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.DescriptorProto.ExtensionRange.options', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='options', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+ _DESCRIPTORPROTO_RESERVEDRANGE = _descriptor.Descriptor(
+ name='ReservedRange',
+ full_name='google.protobuf.DescriptorProto.ReservedRange',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='start', full_name='google.protobuf.DescriptorProto.ReservedRange.start', index=0,
+ number=1, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='start', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='end', full_name='google.protobuf.DescriptorProto.ReservedRange.end', index=1,
+ number=2, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='end', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
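+ # Nested message types (ExtensionRange, ReservedRange) were built above and
+ # are attached to their parent via the nested_types list below.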
+ _DESCRIPTORPROTO = _descriptor.Descriptor(
+ name='DescriptorProto',
+ full_name='google.protobuf.DescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.DescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='field', full_name='google.protobuf.DescriptorProto.field', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='field', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2,
+ number=6, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='extension', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='nestedType', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='enumType', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5,
+ number=5, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='extensionRange', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6,
+ number=8, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='oneofDecl', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.DescriptorProto.options', index=7,
+ number=7, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='options', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='reserved_range', full_name='google.protobuf.DescriptorProto.reserved_range', index=8,
+ number=9, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='reservedRange', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='reserved_name', full_name='google.protobuf.DescriptorProto.reserved_name', index=9,
+ number=10, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='reservedName', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, _DESCRIPTORPROTO_RESERVEDRANGE, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _EXTENSIONRANGEOPTIONS_DECLARATION = _descriptor.Descriptor(
+ name='Declaration',
+ full_name='google.protobuf.ExtensionRangeOptions.Declaration',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='number', full_name='google.protobuf.ExtensionRangeOptions.Declaration.number', index=0,
+ number=1, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='number', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='full_name', full_name='google.protobuf.ExtensionRangeOptions.Declaration.full_name', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='fullName', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='type', full_name='google.protobuf.ExtensionRangeOptions.Declaration.type', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='type', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='reserved', full_name='google.protobuf.ExtensionRangeOptions.Declaration.reserved', index=3,
+ number=5, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='reserved', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='repeated', full_name='google.protobuf.ExtensionRangeOptions.Declaration.repeated', index=4,
+ number=6, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='repeated', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
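+ # google.protobuf.ExtensionRangeOptions: options attached to a message's
+ # extension range. The serialized_options bytes on `declaration` encode its
+ # field-level option (retention = RETENTION_SOURCE), and `verification`
+ # defaults to UNVERIFIED (1).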
+ _EXTENSIONRANGEOPTIONS = _descriptor.Descriptor(
+ name='ExtensionRangeOptions',
+ full_name='google.protobuf.ExtensionRangeOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.ExtensionRangeOptions.uninterpreted_option', index=0,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='uninterpretedOption', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='declaration', full_name='google.protobuf.ExtensionRangeOptions.declaration', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=b'\210\001\002', json_name='declaration', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='features', full_name='google.protobuf.ExtensionRangeOptions.features', index=2,
+ number=50, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='features', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='verification', full_name='google.protobuf.ExtensionRangeOptions.verification', index=3,
+ number=3, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='verification', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[_EXTENSIONRANGEOPTIONS_DECLARATION, ],
+ enum_types=[
+ _EXTENSIONRANGEOPTIONS_VERIFICATIONSTATE,
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
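+ # google.protobuf.FieldDescriptorProto: describes one field of a message (or
+ # an extension, in which case `extendee` names the extended message). The
+ # numeric `type` and `label` values refer to the Type and Label enum
+ # descriptors listed in enum_types below.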
+ _FIELDDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='FieldDescriptorProto',
+ full_name='google.protobuf.FieldDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1,
+ number=3, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='number', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='label', full_name='google.protobuf.FieldDescriptorProto.label', index=2,
+ number=4, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='label', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3,
+ number=5, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='type', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4,
+ number=6, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='typeName', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='extendee', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6,
+ number=7, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='defaultValue', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7,
+ number=9, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='oneofIndex', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='json_name', full_name='google.protobuf.FieldDescriptorProto.json_name', index=8,
+ number=10, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='jsonName', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=9,
+ number=8, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='options', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='proto3_optional', full_name='google.protobuf.FieldDescriptorProto.proto3_optional', index=10,
+ number=17, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='proto3Optional', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _FIELDDESCRIPTORPROTO_TYPE,
+ _FIELDDESCRIPTORPROTO_LABEL,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
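+ # google.protobuf.OneofDescriptorProto: a oneof declaration inside a message;
+ # just a name plus optional OneofOptions.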
+ _ONEOFDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='OneofDescriptorProto',
+ full_name='google.protobuf.OneofDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.OneofDescriptorProto.options', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='options', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
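+ # google.protobuf.EnumDescriptorProto.EnumReservedRange: an inclusive range
+ # [start, end] of reserved enum numbers (unlike DescriptorProto.ReservedRange,
+ # whose end is exclusive).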
+ _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE = _descriptor.Descriptor(
+ name='EnumReservedRange',
+ full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='start', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.start', index=0,
+ number=1, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='start', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='end', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.end', index=1,
+ number=2, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='end', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
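+ # google.protobuf.EnumDescriptorProto: describes an enum type, its values,
+ # options, and reserved number ranges / names.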
+ _ENUMDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='EnumDescriptorProto',
+ full_name='google.protobuf.EnumDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='google.protobuf.EnumDescriptorProto.value', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='value', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='options', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='reserved_range', full_name='google.protobuf.EnumDescriptorProto.reserved_range', index=3,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='reservedRange', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='reserved_name', full_name='google.protobuf.EnumDescriptorProto.reserved_name', index=4,
+ number=5, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='reservedName', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[_ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
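+ # google.protobuf.EnumValueDescriptorProto: a single enum value (name, number,
+ # options).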
+ _ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='EnumValueDescriptorProto',
+ full_name='google.protobuf.EnumValueDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1,
+ number=2, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='number', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='options', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
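+ # google.protobuf.ServiceDescriptorProto: a service, its methods, and options.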
+ _SERVICEDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='ServiceDescriptorProto',
+ full_name='google.protobuf.ServiceDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='method', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='options', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
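+ # google.protobuf.MethodDescriptorProto: one RPC method; input_type and
+ # output_type are fully-qualified message type names, and both streaming
+ # flags default to False.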
+ _METHODDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='MethodDescriptorProto',
+ full_name='google.protobuf.MethodDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='inputType', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='outputType', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='options', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='client_streaming', full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4,
+ number=5, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='clientStreaming', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5,
+ number=6, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='serverStreaming', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
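+ # google.protobuf.FileOptions: file-wide code-generation options (Java, Go,
+ # ObjC, C#, Swift, PHP, and Ruby naming, optimize_for, generic-services
+ # toggles). Extendable from field 1000 upward for custom options; the
+ # serialized_options on java_generate_equals_and_hash mark it deprecated.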
+ _FILEOPTIONS = _descriptor.Descriptor(
+ name='FileOptions',
+ full_name='google.protobuf.FileOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='javaPackage', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1,
+ number=8, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='javaOuterClassname', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2,
+ number=10, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='javaMultipleFiles', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3,
+ number=20, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=b'\030\001', json_name='javaGenerateEqualsAndHash', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4,
+ number=27, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='javaStringCheckUtf8', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5,
+ number=9, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='optimizeFor', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6,
+ number=11, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='goPackage', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7,
+ number=16, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='ccGenericServices', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8,
+ number=17, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='javaGenericServices', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9,
+ number=18, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='pyGenericServices', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='php_generic_services', full_name='google.protobuf.FileOptions.php_generic_services', index=10,
+ number=42, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='phpGenericServices', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=11,
+ number=23, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='deprecated', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=12,
+ number=31, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=True,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='ccEnableArenas', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='objc_class_prefix', full_name='google.protobuf.FileOptions.objc_class_prefix', index=13,
+ number=36, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='objcClassPrefix', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='csharp_namespace', full_name='google.protobuf.FileOptions.csharp_namespace', index=14,
+ number=37, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='csharpNamespace', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='swift_prefix', full_name='google.protobuf.FileOptions.swift_prefix', index=15,
+ number=39, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='swiftPrefix', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='php_class_prefix', full_name='google.protobuf.FileOptions.php_class_prefix', index=16,
+ number=40, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='phpClassPrefix', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='php_namespace', full_name='google.protobuf.FileOptions.php_namespace', index=17,
+ number=41, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='phpNamespace', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='php_metadata_namespace', full_name='google.protobuf.FileOptions.php_metadata_namespace', index=18,
+ number=44, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='phpMetadataNamespace', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='ruby_package', full_name='google.protobuf.FileOptions.ruby_package', index=19,
+ number=45, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='rubyPackage', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='features', full_name='google.protobuf.FileOptions.features', index=20,
+ number=50, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='features', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=21,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='uninterpretedOption', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _FILEOPTIONS_OPTIMIZEMODE,
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
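+ # google.protobuf.MessageOptions: per-message options such as
+ # message_set_wire_format and map_entry; also extendable from field 1000.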
+ _MESSAGEOPTIONS = _descriptor.Descriptor(
+ name='MessageOptions',
+ full_name='google.protobuf.MessageOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0,
+ number=1, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='messageSetWireFormat', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1,
+ number=2, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='noStandardDescriptorAccessor', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2,
+ number=3, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='deprecated', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3,
+ number=7, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='mapEntry', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='deprecated_legacy_json_field_conflicts', full_name='google.protobuf.MessageOptions.deprecated_legacy_json_field_conflicts', index=4,
+ number=11, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=b'\030\001', json_name='deprecatedLegacyJsonFieldConflicts', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='features', full_name='google.protobuf.MessageOptions.features', index=5,
+ number=12, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='features', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=6,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='uninterpretedOption', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
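+ # google.protobuf.FieldOptions.EditionDefault: an (edition, value) pair giving
+ # the enclosing field's default under a particular edition.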
+ _FIELDOPTIONS_EDITIONDEFAULT = _descriptor.Descriptor(
+ name='EditionDefault',
+ full_name='google.protobuf.FieldOptions.EditionDefault',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='edition', full_name='google.protobuf.FieldOptions.EditionDefault.edition', index=0,
+ number=3, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='edition', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='google.protobuf.FieldOptions.EditionDefault.value', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='value', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
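+ # google.protobuf.FieldOptions: per-field options (ctype, packed, jstype,
+ # lazy, deprecated, debug_redact, retention/targets, edition_defaults,
+ # features); extendable from field 1000 upward.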
+ _FIELDOPTIONS = _descriptor.Descriptor(
+ name='FieldOptions',
+ full_name='google.protobuf.FieldOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='ctype', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='packed', full_name='google.protobuf.FieldOptions.packed', index=1,
+ number=2, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='packed', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='jstype', full_name='google.protobuf.FieldOptions.jstype', index=2,
+ number=6, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='jstype', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=3,
+ number=5, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='lazy', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='unverified_lazy', full_name='google.protobuf.FieldOptions.unverified_lazy', index=4,
+ number=15, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='unverifiedLazy', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=5,
+ number=3, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='deprecated', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='weak', full_name='google.protobuf.FieldOptions.weak', index=6,
+ number=10, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='weak', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='debug_redact', full_name='google.protobuf.FieldOptions.debug_redact', index=7,
+ number=16, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='debugRedact', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='retention', full_name='google.protobuf.FieldOptions.retention', index=8,
+ number=17, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='retention', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='targets', full_name='google.protobuf.FieldOptions.targets', index=9,
+ number=19, type=14, cpp_type=8, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='targets', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='edition_defaults', full_name='google.protobuf.FieldOptions.edition_defaults', index=10,
+ number=20, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='editionDefaults', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='features', full_name='google.protobuf.FieldOptions.features', index=11,
+ number=21, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='features', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=12,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='uninterpretedOption', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[_FIELDOPTIONS_EDITIONDEFAULT, ],
+ enum_types=[
+ _FIELDOPTIONS_CTYPE,
+ _FIELDOPTIONS_JSTYPE,
+ _FIELDOPTIONS_OPTIONRETENTION,
+ _FIELDOPTIONS_OPTIONTARGETTYPE,
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
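+ # google.protobuf.OneofOptions: per-oneof options; currently just `features`
+ # plus the standard uninterpreted_option escape hatch.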
+ _ONEOFOPTIONS = _descriptor.Descriptor(
+ name='OneofOptions',
+ full_name='google.protobuf.OneofOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='features', full_name='google.protobuf.OneofOptions.features', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='features', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.OneofOptions.uninterpreted_option', index=1,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='uninterpretedOption', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
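+ # google.protobuf.EnumOptions: per-enum options (allow_alias, deprecated,
+ # features); the legacy JSON-conflict flag carries a deprecated marker in its
+ # serialized_options.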
+ _ENUMOPTIONS = _descriptor.Descriptor(
+ name='EnumOptions',
+ full_name='google.protobuf.EnumOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0,
+ number=2, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='allowAlias', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1,
+ number=3, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='deprecated', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='deprecated_legacy_json_field_conflicts', full_name='google.protobuf.EnumOptions.deprecated_legacy_json_field_conflicts', index=2,
+ number=6, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=b'\030\001', json_name='deprecatedLegacyJsonFieldConflicts', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='features', full_name='google.protobuf.EnumOptions.features', index=3,
+ number=7, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='features', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=4,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='uninterpretedOption', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
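+ # google.protobuf.EnumValueOptions: per-value options (deprecated, features,
+ # debug_redact).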
+ _ENUMVALUEOPTIONS = _descriptor.Descriptor(
+ name='EnumValueOptions',
+ full_name='google.protobuf.EnumValueOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0,
+ number=1, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='deprecated', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='features', full_name='google.protobuf.EnumValueOptions.features', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='features', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='debug_redact', full_name='google.protobuf.EnumValueOptions.debug_redact', index=2,
+ number=3, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='debugRedact', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=3,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='uninterpretedOption', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
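+ # google.protobuf.ServiceOptions: per-service options (features, deprecated,
+ # uninterpreted_option); field numbers start at 33 because lower numbers are
+ # reserved in descriptor.proto.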
+ _SERVICEOPTIONS = _descriptor.Descriptor(
+ name='ServiceOptions',
+ full_name='google.protobuf.ServiceOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='features', full_name='google.protobuf.ServiceOptions.features', index=0,
+ number=34, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='features', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=1,
+ number=33, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='deprecated', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=2,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='uninterpretedOption', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
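+ # google.protobuf.MethodOptions: per-method options; idempotency_level uses
+ # the IdempotencyLevel enum (default 0, IDEMPOTENCY_UNKNOWN).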
+ _METHODOPTIONS = _descriptor.Descriptor(
+ name='MethodOptions',
+ full_name='google.protobuf.MethodOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0,
+ number=33, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='deprecated', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='idempotency_level', full_name='google.protobuf.MethodOptions.idempotency_level', index=1,
+ number=34, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='idempotencyLevel', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='features', full_name='google.protobuf.MethodOptions.features', index=2,
+ number=35, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='features', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=3,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='uninterpretedOption', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _METHODOPTIONS_IDEMPOTENCYLEVEL,
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
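+ # google.protobuf.UninterpretedOption.NamePart: one dot-separated component of
+ # an option name; is_extension marks components written in parentheses. Both
+ # fields are required (label=2).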
+ _UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor(
+ name='NamePart',
+ full_name='google.protobuf.UninterpretedOption.NamePart',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0,
+ number=1, type=9, cpp_type=9, label=2,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='namePart', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1,
+ number=2, type=8, cpp_type=7, label=2,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='isExtension', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
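+ # google.protobuf.UninterpretedOption: an option the parser has not yet
+ # resolved against its descriptor; one of the typed *_value fields holds the
+ # raw value as the tokenizer identified it.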
+ _UNINTERPRETEDOPTION = _descriptor.Descriptor(
+ name='UninterpretedOption',
+ full_name='google.protobuf.UninterpretedOption',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.UninterpretedOption.name', index=0,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='identifierValue', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2,
+ number=4, type=4, cpp_type=4, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='positiveIntValue', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3,
+ number=5, type=3, cpp_type=2, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='negativeIntValue', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4,
+ number=6, type=1, cpp_type=5, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='doubleValue', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5,
+ number=7, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"",
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='stringValue', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6,
+ number=8, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='aggregateValue', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _FEATURESET = _descriptor.Descriptor(
+ name='FeatureSet',
+ full_name='google.protobuf.FeatureSet',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='field_presence', full_name='google.protobuf.FeatureSet.field_presence', index=0,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=b'\210\001\001\230\001\004\230\001\001\242\001\r\022\010EXPLICIT\030\346\007\242\001\r\022\010IMPLICIT\030\347\007\242\001\r\022\010EXPLICIT\030\350\007', json_name='fieldPresence', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='enum_type', full_name='google.protobuf.FeatureSet.enum_type', index=1,
+ number=2, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=b'\210\001\001\230\001\006\230\001\001\242\001\013\022\006CLOSED\030\346\007\242\001\t\022\004OPEN\030\347\007', json_name='enumType', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='repeated_field_encoding', full_name='google.protobuf.FeatureSet.repeated_field_encoding', index=2,
+ number=3, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=b'\210\001\001\230\001\004\230\001\001\242\001\r\022\010EXPANDED\030\346\007\242\001\013\022\006PACKED\030\347\007', json_name='repeatedFieldEncoding', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='utf8_validation', full_name='google.protobuf.FeatureSet.utf8_validation', index=3,
+ number=4, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=b'\210\001\001\230\001\004\230\001\001\242\001\t\022\004NONE\030\346\007\242\001\013\022\006VERIFY\030\347\007', json_name='utf8Validation', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='message_encoding', full_name='google.protobuf.FeatureSet.message_encoding', index=4,
+ number=5, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=b'\210\001\001\230\001\004\230\001\001\242\001\024\022\017LENGTH_PREFIXED\030\346\007', json_name='messageEncoding', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='json_format', full_name='google.protobuf.FeatureSet.json_format', index=5,
+ number=6, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=b'\210\001\001\230\001\003\230\001\006\230\001\001\242\001\027\022\022LEGACY_BEST_EFFORT\030\346\007\242\001\n\022\005ALLOW\030\347\007', json_name='jsonFormat', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _FEATURESET_FIELDPRESENCE,
+ _FEATURESET_ENUMTYPE,
+ _FEATURESET_REPEATEDFIELDENCODING,
+ _FEATURESET_UTF8VALIDATION,
+ _FEATURESET_MESSAGEENCODING,
+ _FEATURESET_JSONFORMAT,
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 1001), (1001, 1002), (9995, 10000), ],
+ oneofs=[
+ ],
+ )
+
+
+ _FEATURESETDEFAULTS_FEATURESETEDITIONDEFAULT = _descriptor.Descriptor(
+ name='FeatureSetEditionDefault',
+ full_name='google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='edition', full_name='google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.edition', index=0,
+ number=3, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='edition', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='features', full_name='google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.features', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='features', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+ _FEATURESETDEFAULTS = _descriptor.Descriptor(
+ name='FeatureSetDefaults',
+ full_name='google.protobuf.FeatureSetDefaults',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='defaults', full_name='google.protobuf.FeatureSetDefaults.defaults', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='defaults', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='minimum_edition', full_name='google.protobuf.FeatureSetDefaults.minimum_edition', index=1,
+ number=4, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='minimumEdition', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='maximum_edition', full_name='google.protobuf.FeatureSetDefaults.maximum_edition', index=2,
+ number=5, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='maximumEdition', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[_FEATURESETDEFAULTS_FEATURESETEDITIONDEFAULT, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _SOURCECODEINFO_LOCATION = _descriptor.Descriptor(
+ name='Location',
+ full_name='google.protobuf.SourceCodeInfo.Location',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0,
+ number=1, type=5, cpp_type=1, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=b'\020\001', json_name='path', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1,
+ number=2, type=5, cpp_type=1, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=b'\020\001', json_name='span', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='leadingComments', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3,
+ number=4, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='trailingComments', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='leading_detached_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_detached_comments', index=4,
+ number=6, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='leadingDetachedComments', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+ _SOURCECODEINFO = _descriptor.Descriptor(
+ name='SourceCodeInfo',
+ full_name='google.protobuf.SourceCodeInfo',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='location', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[_SOURCECODEINFO_LOCATION, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _GENERATEDCODEINFO_ANNOTATION = _descriptor.Descriptor(
+ name='Annotation',
+ full_name='google.protobuf.GeneratedCodeInfo.Annotation',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='path', full_name='google.protobuf.GeneratedCodeInfo.Annotation.path', index=0,
+ number=1, type=5, cpp_type=1, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=b'\020\001', json_name='path', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='source_file', full_name='google.protobuf.GeneratedCodeInfo.Annotation.source_file', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='sourceFile', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='begin', full_name='google.protobuf.GeneratedCodeInfo.Annotation.begin', index=2,
+ number=3, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='begin', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='end', full_name='google.protobuf.GeneratedCodeInfo.Annotation.end', index=3,
+ number=4, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='end', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='semantic', full_name='google.protobuf.GeneratedCodeInfo.Annotation.semantic', index=4,
+ number=5, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='semantic', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _GENERATEDCODEINFO_ANNOTATION_SEMANTIC,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+ _GENERATEDCODEINFO = _descriptor.Descriptor(
+ name='GeneratedCodeInfo',
+ full_name='google.protobuf.GeneratedCodeInfo',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='annotation', full_name='google.protobuf.GeneratedCodeInfo.annotation', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, json_name='annotation', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[_GENERATEDCODEINFO_ANNOTATION, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+ _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
+ _FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO
+ _FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
+ _FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO
+ _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
+ _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
+ _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO
+ _FILEDESCRIPTORPROTO.fields_by_name['edition'].enum_type = _EDITION
+ _DESCRIPTORPROTO_EXTENSIONRANGE.fields_by_name['options'].message_type = _EXTENSIONRANGEOPTIONS
+ _DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO
+ _DESCRIPTORPROTO_RESERVEDRANGE.containing_type = _DESCRIPTORPROTO
+ _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
+ _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
+ _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
+ _DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
+ _DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE
+ _DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO
+ _DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS
+ _DESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _DESCRIPTORPROTO_RESERVEDRANGE
+ _EXTENSIONRANGEOPTIONS_DECLARATION.containing_type = _EXTENSIONRANGEOPTIONS
+ _EXTENSIONRANGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _EXTENSIONRANGEOPTIONS.fields_by_name['declaration'].message_type = _EXTENSIONRANGEOPTIONS_DECLARATION
+ _EXTENSIONRANGEOPTIONS.fields_by_name['features'].message_type = _FEATURESET
+ _EXTENSIONRANGEOPTIONS.fields_by_name['verification'].enum_type = _EXTENSIONRANGEOPTIONS_VERIFICATIONSTATE
+ _EXTENSIONRANGEOPTIONS_VERIFICATIONSTATE.containing_type = _EXTENSIONRANGEOPTIONS
+ _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
+ _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
+ _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
+ _FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO
+ _FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO
+ _ONEOFDESCRIPTORPROTO.fields_by_name['options'].message_type = _ONEOFOPTIONS
+ _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE.containing_type = _ENUMDESCRIPTORPROTO
+ _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
+ _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
+ _ENUMDESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE
+ _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
+ _SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO
+ _SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS
+ _METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS
+ _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
+ _FILEOPTIONS.fields_by_name['features'].message_type = _FEATURESET
+ _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS
+ _MESSAGEOPTIONS.fields_by_name['features'].message_type = _FEATURESET
+ _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _FIELDOPTIONS_EDITIONDEFAULT.fields_by_name['edition'].enum_type = _EDITION
+ _FIELDOPTIONS_EDITIONDEFAULT.containing_type = _FIELDOPTIONS
+ _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
+ _FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE
+ _FIELDOPTIONS.fields_by_name['retention'].enum_type = _FIELDOPTIONS_OPTIONRETENTION
+ _FIELDOPTIONS.fields_by_name['targets'].enum_type = _FIELDOPTIONS_OPTIONTARGETTYPE
+ _FIELDOPTIONS.fields_by_name['edition_defaults'].message_type = _FIELDOPTIONS_EDITIONDEFAULT
+ _FIELDOPTIONS.fields_by_name['features'].message_type = _FEATURESET
+ _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS
+ _FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS
+ _FIELDOPTIONS_OPTIONRETENTION.containing_type = _FIELDOPTIONS
+ _FIELDOPTIONS_OPTIONTARGETTYPE.containing_type = _FIELDOPTIONS
+ _ONEOFOPTIONS.fields_by_name['features'].message_type = _FEATURESET
+ _ONEOFOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _ENUMOPTIONS.fields_by_name['features'].message_type = _FEATURESET
+ _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _ENUMVALUEOPTIONS.fields_by_name['features'].message_type = _FEATURESET
+ _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _SERVICEOPTIONS.fields_by_name['features'].message_type = _FEATURESET
+ _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _METHODOPTIONS.fields_by_name['idempotency_level'].enum_type = _METHODOPTIONS_IDEMPOTENCYLEVEL
+ _METHODOPTIONS.fields_by_name['features'].message_type = _FEATURESET
+ _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _METHODOPTIONS_IDEMPOTENCYLEVEL.containing_type = _METHODOPTIONS
+ _UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION
+ _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
+ _FEATURESET.fields_by_name['field_presence'].enum_type = _FEATURESET_FIELDPRESENCE
+ _FEATURESET.fields_by_name['enum_type'].enum_type = _FEATURESET_ENUMTYPE
+ _FEATURESET.fields_by_name['repeated_field_encoding'].enum_type = _FEATURESET_REPEATEDFIELDENCODING
+ _FEATURESET.fields_by_name['utf8_validation'].enum_type = _FEATURESET_UTF8VALIDATION
+ _FEATURESET.fields_by_name['message_encoding'].enum_type = _FEATURESET_MESSAGEENCODING
+ _FEATURESET.fields_by_name['json_format'].enum_type = _FEATURESET_JSONFORMAT
+ _FEATURESET_FIELDPRESENCE.containing_type = _FEATURESET
+ _FEATURESET_ENUMTYPE.containing_type = _FEATURESET
+ _FEATURESET_REPEATEDFIELDENCODING.containing_type = _FEATURESET
+ _FEATURESET_UTF8VALIDATION.containing_type = _FEATURESET
+ _FEATURESET_MESSAGEENCODING.containing_type = _FEATURESET
+ _FEATURESET_JSONFORMAT.containing_type = _FEATURESET
+ _FEATURESETDEFAULTS_FEATURESETEDITIONDEFAULT.fields_by_name['edition'].enum_type = _EDITION
+ _FEATURESETDEFAULTS_FEATURESETEDITIONDEFAULT.fields_by_name['features'].message_type = _FEATURESET
+ _FEATURESETDEFAULTS_FEATURESETEDITIONDEFAULT.containing_type = _FEATURESETDEFAULTS
+ _FEATURESETDEFAULTS.fields_by_name['defaults'].message_type = _FEATURESETDEFAULTS_FEATURESETEDITIONDEFAULT
+ _FEATURESETDEFAULTS.fields_by_name['minimum_edition'].enum_type = _EDITION
+ _FEATURESETDEFAULTS.fields_by_name['maximum_edition'].enum_type = _EDITION
+ _SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO
+ _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION
+ _GENERATEDCODEINFO_ANNOTATION.fields_by_name['semantic'].enum_type = _GENERATEDCODEINFO_ANNOTATION_SEMANTIC
+ _GENERATEDCODEINFO_ANNOTATION.containing_type = _GENERATEDCODEINFO
+ _GENERATEDCODEINFO_ANNOTATION_SEMANTIC.containing_type = _GENERATEDCODEINFO_ANNOTATION
+ _GENERATEDCODEINFO.fields_by_name['annotation'].message_type = _GENERATEDCODEINFO_ANNOTATION
+ DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET
+ DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['ExtensionRangeOptions'] = _EXTENSIONRANGEOPTIONS
+ DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS
+ DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS
+ DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS
+ DESCRIPTOR.message_types_by_name['OneofOptions'] = _ONEOFOPTIONS
+ DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS
+ DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS
+ DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS
+ DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS
+ DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION
+ DESCRIPTOR.message_types_by_name['FeatureSet'] = _FEATURESET
+ DESCRIPTOR.message_types_by_name['FeatureSetDefaults'] = _FEATURESETDEFAULTS
+ DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO
+ DESCRIPTOR.message_types_by_name['GeneratedCodeInfo'] = _GENERATEDCODEINFO
+ DESCRIPTOR.enum_types_by_name['Edition'] = _EDITION
+ _sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+else:
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.descriptor_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _globals['DESCRIPTOR']._options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\020DescriptorProtosH\001Z-google.golang.org/protobuf/types/descriptorpb\370\001\001\242\002\003GPB\252\002\032Google.Protobuf.Reflection'
+ _globals['_EXTENSIONRANGEOPTIONS'].fields_by_name['declaration']._options = None
+ _globals['_EXTENSIONRANGEOPTIONS'].fields_by_name['declaration']._serialized_options = b'\210\001\002'
+ _globals['_FILEOPTIONS'].fields_by_name['java_generate_equals_and_hash']._options = None
+ _globals['_FILEOPTIONS'].fields_by_name['java_generate_equals_and_hash']._serialized_options = b'\030\001'
+ _globals['_MESSAGEOPTIONS'].fields_by_name['deprecated_legacy_json_field_conflicts']._options = None
+ _globals['_MESSAGEOPTIONS'].fields_by_name['deprecated_legacy_json_field_conflicts']._serialized_options = b'\030\001'
+ _globals['_ENUMOPTIONS'].fields_by_name['deprecated_legacy_json_field_conflicts']._options = None
+ _globals['_ENUMOPTIONS'].fields_by_name['deprecated_legacy_json_field_conflicts']._serialized_options = b'\030\001'
+ _globals['_FEATURESET'].fields_by_name['field_presence']._options = None
+ _globals['_FEATURESET'].fields_by_name['field_presence']._serialized_options = b'\210\001\001\230\001\004\230\001\001\242\001\r\022\010EXPLICIT\030\346\007\242\001\r\022\010IMPLICIT\030\347\007\242\001\r\022\010EXPLICIT\030\350\007'
+ _globals['_FEATURESET'].fields_by_name['enum_type']._options = None
+ _globals['_FEATURESET'].fields_by_name['enum_type']._serialized_options = b'\210\001\001\230\001\006\230\001\001\242\001\013\022\006CLOSED\030\346\007\242\001\t\022\004OPEN\030\347\007'
+ _globals['_FEATURESET'].fields_by_name['repeated_field_encoding']._options = None
+ _globals['_FEATURESET'].fields_by_name['repeated_field_encoding']._serialized_options = b'\210\001\001\230\001\004\230\001\001\242\001\r\022\010EXPANDED\030\346\007\242\001\013\022\006PACKED\030\347\007'
+ _globals['_FEATURESET'].fields_by_name['utf8_validation']._options = None
+ _globals['_FEATURESET'].fields_by_name['utf8_validation']._serialized_options = b'\210\001\001\230\001\004\230\001\001\242\001\t\022\004NONE\030\346\007\242\001\013\022\006VERIFY\030\347\007'
+ _globals['_FEATURESET'].fields_by_name['message_encoding']._options = None
+ _globals['_FEATURESET'].fields_by_name['message_encoding']._serialized_options = b'\210\001\001\230\001\004\230\001\001\242\001\024\022\017LENGTH_PREFIXED\030\346\007'
+ _globals['_FEATURESET'].fields_by_name['json_format']._options = None
+ _globals['_FEATURESET'].fields_by_name['json_format']._serialized_options = b'\210\001\001\230\001\003\230\001\006\230\001\001\242\001\027\022\022LEGACY_BEST_EFFORT\030\346\007\242\001\n\022\005ALLOW\030\347\007'
+ _globals['_SOURCECODEINFO_LOCATION'].fields_by_name['path']._options = None
+ _globals['_SOURCECODEINFO_LOCATION'].fields_by_name['path']._serialized_options = b'\020\001'
+ _globals['_SOURCECODEINFO_LOCATION'].fields_by_name['span']._options = None
+ _globals['_SOURCECODEINFO_LOCATION'].fields_by_name['span']._serialized_options = b'\020\001'
+ _globals['_GENERATEDCODEINFO_ANNOTATION'].fields_by_name['path']._options = None
+ _globals['_GENERATEDCODEINFO_ANNOTATION'].fields_by_name['path']._serialized_options = b'\020\001'
+ _globals['_EDITION']._serialized_start=11258
+ _globals['_EDITION']._serialized_end=11492
+ _globals['_FILEDESCRIPTORSET']._serialized_start=53
+ _globals['_FILEDESCRIPTORSET']._serialized_end=130
+ _globals['_FILEDESCRIPTORPROTO']._serialized_start=133
+ _globals['_FILEDESCRIPTORPROTO']._serialized_end=797
+ _globals['_DESCRIPTORPROTO']._serialized_start=800
+ _globals['_DESCRIPTORPROTO']._serialized_end=1625
+ _globals['_DESCRIPTORPROTO_EXTENSIONRANGE']._serialized_start=1446
+ _globals['_DESCRIPTORPROTO_EXTENSIONRANGE']._serialized_end=1568
+ _globals['_DESCRIPTORPROTO_RESERVEDRANGE']._serialized_start=1570
+ _globals['_DESCRIPTORPROTO_RESERVEDRANGE']._serialized_end=1625
+ _globals['_EXTENSIONRANGEOPTIONS']._serialized_start=1628
+ _globals['_EXTENSIONRANGEOPTIONS']._serialized_end=2211
+ _globals['_EXTENSIONRANGEOPTIONS_DECLARATION']._serialized_start=1998
+ _globals['_EXTENSIONRANGEOPTIONS_DECLARATION']._serialized_end=2146
+ _globals['_EXTENSIONRANGEOPTIONS_VERIFICATIONSTATE']._serialized_start=2148
+ _globals['_EXTENSIONRANGEOPTIONS_VERIFICATIONSTATE']._serialized_end=2200
+ _globals['_FIELDDESCRIPTORPROTO']._serialized_start=2214
+ _globals['_FIELDDESCRIPTORPROTO']._serialized_end=3047
+ _globals['_FIELDDESCRIPTORPROTO_TYPE']._serialized_start=2668
+ _globals['_FIELDDESCRIPTORPROTO_TYPE']._serialized_end=2978
+ _globals['_FIELDDESCRIPTORPROTO_LABEL']._serialized_start=2980
+ _globals['_FIELDDESCRIPTORPROTO_LABEL']._serialized_end=3047
+ _globals['_ONEOFDESCRIPTORPROTO']._serialized_start=3049
+ _globals['_ONEOFDESCRIPTORPROTO']._serialized_end=3148
+ _globals['_ENUMDESCRIPTORPROTO']._serialized_start=3151
+ _globals['_ENUMDESCRIPTORPROTO']._serialized_end=3506
+ _globals['_ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE']._serialized_start=3447
+ _globals['_ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE']._serialized_end=3506
+ _globals['_ENUMVALUEDESCRIPTORPROTO']._serialized_start=3509
+ _globals['_ENUMVALUEDESCRIPTORPROTO']._serialized_end=3640
+ _globals['_SERVICEDESCRIPTORPROTO']._serialized_start=3643
+ _globals['_SERVICEDESCRIPTORPROTO']._serialized_end=3810
+ _globals['_METHODDESCRIPTORPROTO']._serialized_start=3813
+ _globals['_METHODDESCRIPTORPROTO']._serialized_end=4078
+ _globals['_FILEOPTIONS']._serialized_start=4081
+ _globals['_FILEOPTIONS']._serialized_end=5307
+ _globals['_FILEOPTIONS_OPTIMIZEMODE']._serialized_start=5232
+ _globals['_FILEOPTIONS_OPTIMIZEMODE']._serialized_end=5290
+ _globals['_MESSAGEOPTIONS']._serialized_start=5310
+ _globals['_MESSAGEOPTIONS']._serialized_end=5810
+ _globals['_FIELDOPTIONS']._serialized_start=5813
+ _globals['_FIELDOPTIONS']._serialized_end=7138
+ _globals['_FIELDOPTIONS_EDITIONDEFAULT']._serialized_start=6563
+ _globals['_FIELDOPTIONS_EDITIONDEFAULT']._serialized_end=6653
+ _globals['_FIELDOPTIONS_CTYPE']._serialized_start=6655
+ _globals['_FIELDOPTIONS_CTYPE']._serialized_end=6702
+ _globals['_FIELDOPTIONS_JSTYPE']._serialized_start=6704
+ _globals['_FIELDOPTIONS_JSTYPE']._serialized_end=6757
+ _globals['_FIELDOPTIONS_OPTIONRETENTION']._serialized_start=6759
+ _globals['_FIELDOPTIONS_OPTIONRETENTION']._serialized_end=6844
+ _globals['_FIELDOPTIONS_OPTIONTARGETTYPE']._serialized_start=6847
+ _globals['_FIELDOPTIONS_OPTIONTARGETTYPE']._serialized_end=7115
+ _globals['_ONEOFOPTIONS']._serialized_start=7141
+ _globals['_ONEOFOPTIONS']._serialized_end=7313
+ _globals['_ENUMOPTIONS']._serialized_start=7316
+ _globals['_ENUMOPTIONS']._serialized_end=7653
+ _globals['_ENUMVALUEOPTIONS']._serialized_start=7656
+ _globals['_ENUMVALUEOPTIONS']._serialized_end=7913
+ _globals['_SERVICEOPTIONS']._serialized_start=7916
+ _globals['_SERVICEOPTIONS']._serialized_end=8129
+ _globals['_METHODOPTIONS']._serialized_start=8132
+ _globals['_METHODOPTIONS']._serialized_end=8541
+ _globals['_METHODOPTIONS_IDEMPOTENCYLEVEL']._serialized_start=8450
+ _globals['_METHODOPTIONS_IDEMPOTENCYLEVEL']._serialized_end=8530
+ _globals['_UNINTERPRETEDOPTION']._serialized_start=8544
+ _globals['_UNINTERPRETEDOPTION']._serialized_end=8954
+ _globals['_UNINTERPRETEDOPTION_NAMEPART']._serialized_start=8880
+ _globals['_UNINTERPRETEDOPTION_NAMEPART']._serialized_end=8954
+ _globals['_FEATURESET']._serialized_start=8957
+ _globals['_FEATURESET']._serialized_end=10233
+ _globals['_FEATURESET_FIELDPRESENCE']._serialized_start=9736
+ _globals['_FEATURESET_FIELDPRESENCE']._serialized_end=9828
+ _globals['_FEATURESET_ENUMTYPE']._serialized_start=9830
+ _globals['_FEATURESET_ENUMTYPE']._serialized_end=9885
+ _globals['_FEATURESET_REPEATEDFIELDENCODING']._serialized_start=9887
+ _globals['_FEATURESET_REPEATEDFIELDENCODING']._serialized_end=9973
+ _globals['_FEATURESET_UTF8VALIDATION']._serialized_start=9975
+ _globals['_FEATURESET_UTF8VALIDATION']._serialized_end=10042
+ _globals['_FEATURESET_MESSAGEENCODING']._serialized_start=10044
+ _globals['_FEATURESET_MESSAGEENCODING']._serialized_end=10127
+ _globals['_FEATURESET_JSONFORMAT']._serialized_start=10129
+ _globals['_FEATURESET_JSONFORMAT']._serialized_end=10201
+ _globals['_FEATURESETDEFAULTS']._serialized_start=10236
+ _globals['_FEATURESETDEFAULTS']._serialized_end=10618
+ _globals['_FEATURESETDEFAULTS_FEATURESETEDITIONDEFAULT']._serialized_start=10483
+ _globals['_FEATURESETDEFAULTS_FEATURESETEDITIONDEFAULT']._serialized_end=10618
+ _globals['_SOURCECODEINFO']._serialized_start=10621
+ _globals['_SOURCECODEINFO']._serialized_end=10916
+ _globals['_SOURCECODEINFO_LOCATION']._serialized_start=10710
+ _globals['_SOURCECODEINFO_LOCATION']._serialized_end=10916
+ _globals['_GENERATEDCODEINFO']._serialized_start=10919
+ _globals['_GENERATEDCODEINFO']._serialized_end=11255
+ _globals['_GENERATEDCODEINFO_ANNOTATION']._serialized_start=11020
+ _globals['_GENERATEDCODEINFO_ANNOTATION']._serialized_end=11255
+ _globals['_GENERATEDCODEINFO_ANNOTATION_SEMANTIC']._serialized_start=11215
+ _globals['_GENERATEDCODEINFO_ANNOTATION_SEMANTIC']._serialized_end=11255
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/protobuf/descriptor_pool.py b/Lib/site-packages/google/protobuf/descriptor_pool.py
new file mode 100644
index 0000000..c2fe59f
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/descriptor_pool.py
@@ -0,0 +1,1271 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Provides DescriptorPool to use as a container for proto2 descriptors.
+
+The DescriptorPool is used in conjunction with a DescriptorDatabase to maintain
+a collection of protocol buffer descriptors for use when dynamically creating
+message types at runtime.
+
+For most applications protocol buffers should be used via modules generated by
+the protocol buffer compiler tool. This should only be used when the type of
+protocol buffers used in an application or library cannot be predetermined.
+
+Below is a straightforward example of how to use this class::
+
+ pool = DescriptorPool()
+ file_descriptor_protos = [ ... ]
+ for file_descriptor_proto in file_descriptor_protos:
+ pool.Add(file_descriptor_proto)
+ my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
+
+The message descriptor can be used in conjunction with the message_factory
+module in order to create a protocol buffer class that can be encoded and
+decoded.
+
+If you want to get a Python class for the specified proto, use the
+helper functions inside google.protobuf.message_factory
+directly instead of this class.
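+
+For example, a minimal sketch (assuming ``my_message_descriptor`` from the
+snippet above; ``GetMessageClass`` is the relevant helper in recent protobuf
+releases)::
+
+  from google.protobuf import message_factory
+
+  message_class = message_factory.GetMessageClass(my_message_descriptor)
+  message = message_class()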
+"""
+
+__author__ = 'matthewtoia@google.com (Matt Toia)'
+
+import collections
+import warnings
+
+from google.protobuf import descriptor
+from google.protobuf import descriptor_database
+from google.protobuf import text_encoding
+from google.protobuf.internal import python_message
+
+_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access
+
+
+def _Deprecated(func):
+ """Mark functions as deprecated."""
+
+ def NewFunc(*args, **kwargs):
+ warnings.warn(
+        'Call to deprecated function %s(). Note: adding unlinked descriptors '
+        'to a descriptor_pool is wrong. Please use Add() or AddSerializedFile() '
+        'instead. This function will be removed soon.' % func.__name__,
+ category=DeprecationWarning)
+ return func(*args, **kwargs)
+ NewFunc.__name__ = func.__name__
+ NewFunc.__doc__ = func.__doc__
+ NewFunc.__dict__.update(func.__dict__)
+ return NewFunc
+
+
+def _NormalizeFullyQualifiedName(name):
+ """Remove leading period from fully-qualified type name.
+
+ Due to b/13860351 in descriptor_database.py, types in the root namespace are
+ generated with a leading period. This function removes that prefix.
+
+ Args:
+ name (str): The fully-qualified symbol name.
+
+ Returns:
+ str: The normalized fully-qualified symbol name.
+ """
+ return name.lstrip('.')
+
+
+def _OptionsOrNone(descriptor_proto):
+ """Returns the value of the field `options`, or None if it is not set."""
+ if descriptor_proto.HasField('options'):
+ return descriptor_proto.options
+ else:
+ return None
+
+
+def _IsMessageSetExtension(field):
+ return (field.is_extension and
+ field.containing_type.has_options and
+ field.containing_type.GetOptions().message_set_wire_format and
+ field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
+ field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL)
+
+
+class DescriptorPool(object):
+ """A collection of protobufs dynamically constructed by descriptor protos."""
+
+ if _USE_C_DESCRIPTORS:
+
+ def __new__(cls, descriptor_db=None):
+ # pylint: disable=protected-access
+ return descriptor._message.DescriptorPool(descriptor_db)
+
+ def __init__(
+ self, descriptor_db=None, use_deprecated_legacy_json_field_conflicts=False
+ ):
+ """Initializes a Pool of proto buffs.
+
+ The descriptor_db argument to the constructor is provided to allow
+ specialized file descriptor proto lookup code to be triggered on demand. An
+ example would be an implementation which will read and compile a file
+ specified in a call to FindFileByName() and not require the call to Add()
+ at all. Results from this database will be cached internally here as well.
+
+ Args:
+ descriptor_db: A secondary source of file descriptors.
+ use_deprecated_legacy_json_field_conflicts: Unused, for compatibility with
+ C++.
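+
+    Example (a minimal sketch; any object implementing FindFileByName and
+    FindFileContainingSymbol can serve as the secondary database)::
+
+      from google.protobuf import descriptor_database
+
+      db = descriptor_database.DescriptorDatabase()
+      pool = DescriptorPool(descriptor_db=db)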
+ """
+
+ self._internal_db = descriptor_database.DescriptorDatabase()
+ self._descriptor_db = descriptor_db
+ self._descriptors = {}
+ self._enum_descriptors = {}
+ self._service_descriptors = {}
+ self._file_descriptors = {}
+ self._toplevel_extensions = {}
+ self._top_enum_values = {}
+ # We store extensions in two two-level mappings: The first key is the
+ # descriptor of the message being extended, the second key is the extension
+ # full name or its tag number.
+ self._extensions_by_name = collections.defaultdict(dict)
+ self._extensions_by_number = collections.defaultdict(dict)
+
+ def _CheckConflictRegister(self, desc, desc_name, file_name):
+ """Check if the descriptor name conflicts with another of the same name.
+
+ Args:
+ desc: Descriptor of a message, enum, service, extension or enum value.
+ desc_name (str): the full name of desc.
+      file_name (str): The file name of the descriptor.
+ """
+ for register, descriptor_type in [
+ (self._descriptors, descriptor.Descriptor),
+ (self._enum_descriptors, descriptor.EnumDescriptor),
+ (self._service_descriptors, descriptor.ServiceDescriptor),
+ (self._toplevel_extensions, descriptor.FieldDescriptor),
+ (self._top_enum_values, descriptor.EnumValueDescriptor)]:
+ if desc_name in register:
+ old_desc = register[desc_name]
+ if isinstance(old_desc, descriptor.EnumValueDescriptor):
+ old_file = old_desc.type.file.name
+ else:
+ old_file = old_desc.file.name
+
+ if not isinstance(desc, descriptor_type) or (
+ old_file != file_name):
+          error_msg = ('Registration conflict for file "' + file_name +
+                       '": ' + desc_name +
+                       ' is already defined in file "' +
+                       old_file + '". Please fix the conflict by adding a '
+                       'package name to the proto file, or use a different '
+                       'name for the duplicate.')
+ if isinstance(desc, descriptor.EnumValueDescriptor):
+ error_msg += ('\nNote: enum values appear as '
+ 'siblings of the enum type instead of '
+ 'children of it.')
+
+ raise TypeError(error_msg)
+
+ return
+
+ def Add(self, file_desc_proto):
+ """Adds the FileDescriptorProto and its types to this pool.
+
+ Args:
+ file_desc_proto (FileDescriptorProto): The file descriptor to add.
+ """
+
+ self._internal_db.Add(file_desc_proto)
+
+ def AddSerializedFile(self, serialized_file_desc_proto):
+ """Adds the FileDescriptorProto and its types to this pool.
+
+ Args:
+ serialized_file_desc_proto (bytes): A bytes string, serialization of the
+ :class:`FileDescriptorProto` to add.
+
+ Returns:
+ FileDescriptor: Descriptor for the added file.
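+
+    Example (an illustrative sketch; assumes ``serialized`` holds the bytes
+    of a serialized :class:`FileDescriptorProto`)::
+
+      pool = DescriptorPool()
+      file_desc = pool.AddSerializedFile(serialized)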
+ """
+
+ # pylint: disable=g-import-not-at-top
+ from google.protobuf import descriptor_pb2
+ file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
+ serialized_file_desc_proto)
+ file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto)
+ file_desc.serialized_pb = serialized_file_desc_proto
+ return file_desc
+
+  # Adding a Descriptor to the descriptor pool is deprecated. Please use Add()
+  # or AddSerializedFile() to add a FileDescriptorProto instead.
+ @_Deprecated
+ def AddDescriptor(self, desc):
+ self._AddDescriptor(desc)
+
+ # Never call this method. It is for internal usage only.
+ def _AddDescriptor(self, desc):
+ """Adds a Descriptor to the pool, non-recursively.
+
+ If the Descriptor contains nested messages or enums, the caller must
+ explicitly register them. This method also registers the FileDescriptor
+ associated with the message.
+
+ Args:
+ desc: A Descriptor.
+ """
+ if not isinstance(desc, descriptor.Descriptor):
+ raise TypeError('Expected instance of descriptor.Descriptor.')
+
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
+
+ self._descriptors[desc.full_name] = desc
+ self._AddFileDescriptor(desc.file)
+
+ # Never call this method. It is for internal usage only.
+ def _AddEnumDescriptor(self, enum_desc):
+ """Adds an EnumDescriptor to the pool.
+
+ This method also registers the FileDescriptor associated with the enum.
+
+ Args:
+ enum_desc: An EnumDescriptor.
+ """
+
+ if not isinstance(enum_desc, descriptor.EnumDescriptor):
+ raise TypeError('Expected instance of descriptor.EnumDescriptor.')
+
+ file_name = enum_desc.file.name
+ self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name)
+ self._enum_descriptors[enum_desc.full_name] = enum_desc
+
+ # Top enum values need to be indexed.
+ # Count the number of dots to see whether the enum is toplevel or nested
+ # in a message. We cannot use enum_desc.containing_type at this stage.
+ if enum_desc.file.package:
+ top_level = (enum_desc.full_name.count('.')
+ - enum_desc.file.package.count('.') == 1)
+ else:
+ top_level = enum_desc.full_name.count('.') == 0
+ if top_level:
+ file_name = enum_desc.file.name
+ package = enum_desc.file.package
+ for enum_value in enum_desc.values:
+ full_name = _NormalizeFullyQualifiedName(
+ '.'.join((package, enum_value.name)))
+ self._CheckConflictRegister(enum_value, full_name, file_name)
+ self._top_enum_values[full_name] = enum_value
+ self._AddFileDescriptor(enum_desc.file)
+
+  # Adding a ServiceDescriptor to the descriptor pool is deprecated. Please
+  # use Add() or AddSerializedFile() to add a FileDescriptorProto instead.
+ @_Deprecated
+ def AddServiceDescriptor(self, service_desc):
+ self._AddServiceDescriptor(service_desc)
+
+ # Never call this method. It is for internal usage only.
+ def _AddServiceDescriptor(self, service_desc):
+ """Adds a ServiceDescriptor to the pool.
+
+ Args:
+ service_desc: A ServiceDescriptor.
+ """
+
+ if not isinstance(service_desc, descriptor.ServiceDescriptor):
+ raise TypeError('Expected instance of descriptor.ServiceDescriptor.')
+
+ self._CheckConflictRegister(service_desc, service_desc.full_name,
+ service_desc.file.name)
+ self._service_descriptors[service_desc.full_name] = service_desc
+
+  # Adding an ExtensionDescriptor to the descriptor pool is deprecated. Please
+  # use Add() or AddSerializedFile() to add a FileDescriptorProto instead.
+ @_Deprecated
+ def AddExtensionDescriptor(self, extension):
+ self._AddExtensionDescriptor(extension)
+
+ # Never call this method. It is for internal usage only.
+ def _AddExtensionDescriptor(self, extension):
+ """Adds a FieldDescriptor describing an extension to the pool.
+
+ Args:
+ extension: A FieldDescriptor.
+
+ Raises:
+ AssertionError: when another extension with the same number extends the
+ same message.
+ TypeError: when the specified extension is not a
+ descriptor.FieldDescriptor.
+ """
+ if not (isinstance(extension, descriptor.FieldDescriptor) and
+ extension.is_extension):
+ raise TypeError('Expected an extension descriptor.')
+
+ if extension.extension_scope is None:
+ self._CheckConflictRegister(
+ extension, extension.full_name, extension.file.name)
+ self._toplevel_extensions[extension.full_name] = extension
+
+ try:
+ existing_desc = self._extensions_by_number[
+ extension.containing_type][extension.number]
+ except KeyError:
+ pass
+ else:
+ if extension is not existing_desc:
+ raise AssertionError(
+ 'Extensions "%s" and "%s" both try to extend message type "%s" '
+ 'with field number %d.' %
+ (extension.full_name, existing_desc.full_name,
+ extension.containing_type.full_name, extension.number))
+
+ self._extensions_by_number[extension.containing_type][
+ extension.number] = extension
+ self._extensions_by_name[extension.containing_type][
+ extension.full_name] = extension
+
+ # Also register MessageSet extensions with the type name.
+ if _IsMessageSetExtension(extension):
+ self._extensions_by_name[extension.containing_type][
+ extension.message_type.full_name] = extension
+
+ if hasattr(extension.containing_type, '_concrete_class'):
+ python_message._AttachFieldHelpers(
+ extension.containing_type._concrete_class, extension)
+
+ @_Deprecated
+ def AddFileDescriptor(self, file_desc):
+ self._InternalAddFileDescriptor(file_desc)
+
+ # Never call this method. It is for internal usage only.
+ def _InternalAddFileDescriptor(self, file_desc):
+ """Adds a FileDescriptor to the pool, non-recursively.
+
+ If the FileDescriptor contains messages or enums, the caller must explicitly
+ register them.
+
+ Args:
+ file_desc: A FileDescriptor.
+ """
+
+ self._AddFileDescriptor(file_desc)
+
+ def _AddFileDescriptor(self, file_desc):
+ """Adds a FileDescriptor to the pool, non-recursively.
+
+ If the FileDescriptor contains messages or enums, the caller must explicitly
+ register them.
+
+ Args:
+ file_desc: A FileDescriptor.
+ """
+
+ if not isinstance(file_desc, descriptor.FileDescriptor):
+ raise TypeError('Expected instance of descriptor.FileDescriptor.')
+ self._file_descriptors[file_desc.name] = file_desc
+
+ def FindFileByName(self, file_name):
+ """Gets a FileDescriptor by file name.
+
+ Args:
+ file_name (str): The path to the file to get a descriptor for.
+
+ Returns:
+ FileDescriptor: The descriptor for the named file.
+
+ Raises:
+ KeyError: if the file cannot be found in the pool.
+ """
+
+ try:
+ return self._file_descriptors[file_name]
+ except KeyError:
+ pass
+
+ try:
+ file_proto = self._internal_db.FindFileByName(file_name)
+ except KeyError as error:
+ if self._descriptor_db:
+ file_proto = self._descriptor_db.FindFileByName(file_name)
+ else:
+ raise error
+ if not file_proto:
+ raise KeyError('Cannot find a file named %s' % file_name)
+ return self._ConvertFileProtoToFileDescriptor(file_proto)
+
+ def FindFileContainingSymbol(self, symbol):
+ """Gets the FileDescriptor for the file containing the specified symbol.
+
+ Args:
+ symbol (str): The name of the symbol to search for.
+
+ Returns:
+ FileDescriptor: Descriptor for the file that contains the specified
+ symbol.
+
+ Raises:
+ KeyError: if the file cannot be found in the pool.
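+
+    Example (illustrative; assumes the pool already contains the type)::
+
+      file_desc = pool.FindFileContainingSymbol('some.package.MessageType')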
+ """
+
+ symbol = _NormalizeFullyQualifiedName(symbol)
+ try:
+ return self._InternalFindFileContainingSymbol(symbol)
+ except KeyError:
+ pass
+
+ try:
+ # Try fallback database. Build and find again if possible.
+ self._FindFileContainingSymbolInDb(symbol)
+ return self._InternalFindFileContainingSymbol(symbol)
+ except KeyError:
+ raise KeyError('Cannot find a file containing %s' % symbol)
+
+ def _InternalFindFileContainingSymbol(self, symbol):
+ """Gets the already built FileDescriptor containing the specified symbol.
+
+ Args:
+ symbol (str): The name of the symbol to search for.
+
+ Returns:
+ FileDescriptor: Descriptor for the file that contains the specified
+ symbol.
+
+ Raises:
+ KeyError: if the file cannot be found in the pool.
+ """
+ try:
+ return self._descriptors[symbol].file
+ except KeyError:
+ pass
+
+ try:
+ return self._enum_descriptors[symbol].file
+ except KeyError:
+ pass
+
+ try:
+ return self._service_descriptors[symbol].file
+ except KeyError:
+ pass
+
+ try:
+ return self._top_enum_values[symbol].type.file
+ except KeyError:
+ pass
+
+ try:
+ return self._toplevel_extensions[symbol].file
+ except KeyError:
+ pass
+
+ # Try fields, enum values and nested extensions inside a message.
+ top_name, _, sub_name = symbol.rpartition('.')
+ try:
+ message = self.FindMessageTypeByName(top_name)
+ assert (sub_name in message.extensions_by_name or
+ sub_name in message.fields_by_name or
+ sub_name in message.enum_values_by_name)
+ return message.file
+ except (KeyError, AssertionError):
+ raise KeyError('Cannot find a file containing %s' % symbol)
+
+ def FindMessageTypeByName(self, full_name):
+ """Loads the named descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the descriptor to load.
+
+ Returns:
+ Descriptor: The descriptor for the named type.
+
+ Raises:
+ KeyError: if the message cannot be found in the pool.
+ """
+
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ if full_name not in self._descriptors:
+ self._FindFileContainingSymbolInDb(full_name)
+ return self._descriptors[full_name]
+
+ def FindEnumTypeByName(self, full_name):
+ """Loads the named enum descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the enum descriptor to load.
+
+ Returns:
+ EnumDescriptor: The enum descriptor for the named type.
+
+ Raises:
+ KeyError: if the enum cannot be found in the pool.
+ """
+
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ if full_name not in self._enum_descriptors:
+ self._FindFileContainingSymbolInDb(full_name)
+ return self._enum_descriptors[full_name]
+
+ def FindFieldByName(self, full_name):
+ """Loads the named field descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the field descriptor to load.
+
+ Returns:
+ FieldDescriptor: The field descriptor for the named field.
+
+ Raises:
+ KeyError: if the field cannot be found in the pool.
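+
+    Example (illustrative; the lookup key is the message full name joined
+    with the field name, here a hypothetical ``my_field``)::
+
+      field_desc = pool.FindFieldByName('some.package.MessageType.my_field')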
+ """
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ message_name, _, field_name = full_name.rpartition('.')
+ message_descriptor = self.FindMessageTypeByName(message_name)
+ return message_descriptor.fields_by_name[field_name]
+
+ def FindOneofByName(self, full_name):
+ """Loads the named oneof descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the oneof descriptor to load.
+
+ Returns:
+ OneofDescriptor: The oneof descriptor for the named oneof.
+
+ Raises:
+ KeyError: if the oneof cannot be found in the pool.
+ """
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ message_name, _, oneof_name = full_name.rpartition('.')
+ message_descriptor = self.FindMessageTypeByName(message_name)
+ return message_descriptor.oneofs_by_name[oneof_name]
+
+ def FindExtensionByName(self, full_name):
+ """Loads the named extension descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the extension descriptor to load.
+
+ Returns:
+ FieldDescriptor: The field descriptor for the named extension.
+
+ Raises:
+ KeyError: if the extension cannot be found in the pool.
+ """
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ try:
+ # The proto compiler does not give any link between the FileDescriptor
+ # and top-level extensions unless the FileDescriptorProto is added to
+ # the DescriptorDatabase, but this can impact memory usage.
+      # So we register these extensions by name explicitly.
+ return self._toplevel_extensions[full_name]
+ except KeyError:
+ pass
+ message_name, _, extension_name = full_name.rpartition('.')
+ try:
+ # Most extensions are nested inside a message.
+ scope = self.FindMessageTypeByName(message_name)
+ except KeyError:
+ # Some extensions are defined at file scope.
+ scope = self._FindFileContainingSymbolInDb(full_name)
+ return scope.extensions_by_name[extension_name]
+
+ def FindExtensionByNumber(self, message_descriptor, number):
+ """Gets the extension of the specified message with the specified number.
+
+ Extensions have to be registered to this pool by calling :func:`Add` or
+ :func:`AddExtensionDescriptor`.
+
+ Args:
+ message_descriptor (Descriptor): descriptor of the extended message.
+ number (int): Number of the extension field.
+
+ Returns:
+ FieldDescriptor: The descriptor for the extension.
+
+ Raises:
+ KeyError: when no extension with the given number is known for the
+ specified message.
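+
+    Example (illustrative; assumes an extension with field number 100 has
+    been registered for ``message_descriptor``)::
+
+      ext_desc = pool.FindExtensionByNumber(message_descriptor, 100)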
+ """
+ try:
+ return self._extensions_by_number[message_descriptor][number]
+ except KeyError:
+ self._TryLoadExtensionFromDB(message_descriptor, number)
+ return self._extensions_by_number[message_descriptor][number]
+
+ def FindAllExtensions(self, message_descriptor):
+ """Gets all the known extensions of a given message.
+
+    Extensions have to be registered to this pool by calling
+ :func:`Add` or :func:`AddExtensionDescriptor`.
+
+ Args:
+ message_descriptor (Descriptor): Descriptor of the extended message.
+
+ Returns:
+ list[FieldDescriptor]: Field descriptors describing the extensions.
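+
+    Example (illustrative)::
+
+      for ext_desc in pool.FindAllExtensions(message_descriptor):
+        print(ext_desc.full_name, ext_desc.number)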
+ """
+    # Fall back to the descriptor db if FindAllExtensionNumbers is provided.
+ if self._descriptor_db and hasattr(
+ self._descriptor_db, 'FindAllExtensionNumbers'):
+ full_name = message_descriptor.full_name
+ all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name)
+ for number in all_numbers:
+ if number in self._extensions_by_number[message_descriptor]:
+ continue
+ self._TryLoadExtensionFromDB(message_descriptor, number)
+
+ return list(self._extensions_by_number[message_descriptor].values())
+
+ def _TryLoadExtensionFromDB(self, message_descriptor, number):
+ """Try to Load extensions from descriptor db.
+
+ Args:
+ message_descriptor: descriptor of the extended message.
+ number: the extension number that needs to be loaded.
+ """
+ if not self._descriptor_db:
+ return
+ # Only supported when FindFileContainingExtension is provided.
+ if not hasattr(
+ self._descriptor_db, 'FindFileContainingExtension'):
+ return
+
+ full_name = message_descriptor.full_name
+ file_proto = self._descriptor_db.FindFileContainingExtension(
+ full_name, number)
+
+ if file_proto is None:
+ return
+
+ try:
+ self._ConvertFileProtoToFileDescriptor(file_proto)
+    except Exception:
+ warn_msg = ('Unable to load proto file %s for extension number %d.' %
+ (file_proto.name, number))
+ warnings.warn(warn_msg, RuntimeWarning)
+
+ def FindServiceByName(self, full_name):
+ """Loads the named service descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the service descriptor to load.
+
+ Returns:
+ ServiceDescriptor: The service descriptor for the named service.
+
+ Raises:
+ KeyError: if the service cannot be found in the pool.
+ """
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ if full_name not in self._service_descriptors:
+ self._FindFileContainingSymbolInDb(full_name)
+ return self._service_descriptors[full_name]
+
+ def FindMethodByName(self, full_name):
+ """Loads the named service method descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the method descriptor to load.
+
+ Returns:
+ MethodDescriptor: The method descriptor for the service method.
+
+ Raises:
+ KeyError: if the method cannot be found in the pool.
+ """
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ service_name, _, method_name = full_name.rpartition('.')
+ service_descriptor = self.FindServiceByName(service_name)
+ return service_descriptor.methods_by_name[method_name]
+
+ def _FindFileContainingSymbolInDb(self, symbol):
+ """Finds the file in descriptor DB containing the specified symbol.
+
+ Args:
+ symbol (str): The name of the symbol to search for.
+
+ Returns:
+ FileDescriptor: The file that contains the specified symbol.
+
+ Raises:
+ KeyError: if the file cannot be found in the descriptor database.
+ """
+ try:
+ file_proto = self._internal_db.FindFileContainingSymbol(symbol)
+ except KeyError as error:
+ if self._descriptor_db:
+ file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
+ else:
+ raise error
+ if not file_proto:
+ raise KeyError('Cannot find a file containing %s' % symbol)
+ return self._ConvertFileProtoToFileDescriptor(file_proto)
+
+ def _ConvertFileProtoToFileDescriptor(self, file_proto):
+ """Creates a FileDescriptor from a proto or returns a cached copy.
+
+ This method also has the side effect of loading all the symbols found in
+ the file into the appropriate dictionaries in the pool.
+
+ Args:
+ file_proto: The proto to convert.
+
+ Returns:
+ A FileDescriptor matching the passed in proto.
+ """
+ if file_proto.name not in self._file_descriptors:
+ built_deps = list(self._GetDeps(file_proto.dependency))
+ direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]
+ public_deps = [direct_deps[i] for i in file_proto.public_dependency]
+
+ file_descriptor = descriptor.FileDescriptor(
+ pool=self,
+ name=file_proto.name,
+ package=file_proto.package,
+ syntax=file_proto.syntax,
+ options=_OptionsOrNone(file_proto),
+ serialized_pb=file_proto.SerializeToString(),
+ dependencies=direct_deps,
+ public_dependencies=public_deps,
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+ scope = {}
+
+ # This loop extracts all the message and enum types from all the
+ # dependencies of the file_proto. This is necessary to create the
+ # scope of available message types when defining the passed in
+ # file proto.
+ for dependency in built_deps:
+ scope.update(self._ExtractSymbols(
+ dependency.message_types_by_name.values()))
+ scope.update((_PrefixWithDot(enum.full_name), enum)
+ for enum in dependency.enum_types_by_name.values())
+
+ for message_type in file_proto.message_type:
+ message_desc = self._ConvertMessageDescriptor(
+ message_type, file_proto.package, file_descriptor, scope,
+ file_proto.syntax)
+ file_descriptor.message_types_by_name[message_desc.name] = (
+ message_desc)
+
+ for enum_type in file_proto.enum_type:
+ file_descriptor.enum_types_by_name[enum_type.name] = (
+ self._ConvertEnumDescriptor(enum_type, file_proto.package,
+ file_descriptor, None, scope, True))
+
+ for index, extension_proto in enumerate(file_proto.extension):
+ extension_desc = self._MakeFieldDescriptor(
+ extension_proto, file_proto.package, index, file_descriptor,
+ is_extension=True)
+ extension_desc.containing_type = self._GetTypeFromScope(
+ file_descriptor.package, extension_proto.extendee, scope)
+ self._SetFieldType(extension_proto, extension_desc,
+ file_descriptor.package, scope)
+ file_descriptor.extensions_by_name[extension_desc.name] = (
+ extension_desc)
+
+ for desc_proto in file_proto.message_type:
+ self._SetAllFieldTypes(file_proto.package, desc_proto, scope)
+
+ if file_proto.package:
+ desc_proto_prefix = _PrefixWithDot(file_proto.package)
+ else:
+ desc_proto_prefix = ''
+
+ for desc_proto in file_proto.message_type:
+ desc = self._GetTypeFromScope(
+ desc_proto_prefix, desc_proto.name, scope)
+ file_descriptor.message_types_by_name[desc_proto.name] = desc
+
+ for index, service_proto in enumerate(file_proto.service):
+ file_descriptor.services_by_name[service_proto.name] = (
+ self._MakeServiceDescriptor(service_proto, index, scope,
+ file_proto.package, file_descriptor))
+
+ self._file_descriptors[file_proto.name] = file_descriptor
+
+ # Add extensions to the pool
+ def AddExtensionForNested(message_type):
+ for nested in message_type.nested_types:
+ AddExtensionForNested(nested)
+ for extension in message_type.extensions:
+ self._AddExtensionDescriptor(extension)
+
+ file_desc = self._file_descriptors[file_proto.name]
+ for extension in file_desc.extensions_by_name.values():
+ self._AddExtensionDescriptor(extension)
+ for message_type in file_desc.message_types_by_name.values():
+ AddExtensionForNested(message_type)
+
+ return file_desc
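+
+  # Illustrative flow (the file name is hypothetical): this conversion is
+  # normally triggered lazily by a lookup on a pool backed by a descriptor
+  # database, rather than called directly:
+  #   pool = DescriptorPool(descriptor_db)
+  #   fd = pool.FindFileByName('my/package/file.proto')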
+
+ def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
+ scope=None, syntax=None):
+ """Adds the proto to the pool in the specified package.
+
+ Args:
+ desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
+ package: The package the proto should be located in.
+ file_desc: The file containing this message.
+ scope: Dict mapping short and full symbols to message and enum types.
+ syntax: string indicating syntax of the file ("proto2" or "proto3")
+
+ Returns:
+ The added descriptor.
+ """
+
+ if package:
+ desc_name = '.'.join((package, desc_proto.name))
+ else:
+ desc_name = desc_proto.name
+
+ if file_desc is None:
+ file_name = None
+ else:
+ file_name = file_desc.name
+
+ if scope is None:
+ scope = {}
+
+ nested = [
+ self._ConvertMessageDescriptor(
+ nested, desc_name, file_desc, scope, syntax)
+ for nested in desc_proto.nested_type]
+ enums = [
+ self._ConvertEnumDescriptor(enum, desc_name, file_desc, None,
+ scope, False)
+ for enum in desc_proto.enum_type]
+ fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc)
+ for index, field in enumerate(desc_proto.field)]
+ extensions = [
+ self._MakeFieldDescriptor(extension, desc_name, index, file_desc,
+ is_extension=True)
+ for index, extension in enumerate(desc_proto.extension)]
+ oneofs = [
+ # pylint: disable=g-complex-comprehension
+ descriptor.OneofDescriptor(
+ desc.name,
+ '.'.join((desc_name, desc.name)),
+ index,
+ None,
+ [],
+ _OptionsOrNone(desc),
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+ for index, desc in enumerate(desc_proto.oneof_decl)
+ ]
+ extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
+ if extension_ranges:
+ is_extendable = True
+ else:
+ is_extendable = False
+ desc = descriptor.Descriptor(
+ name=desc_proto.name,
+ full_name=desc_name,
+ filename=file_name,
+ containing_type=None,
+ fields=fields,
+ oneofs=oneofs,
+ nested_types=nested,
+ enum_types=enums,
+ extensions=extensions,
+ options=_OptionsOrNone(desc_proto),
+ is_extendable=is_extendable,
+ extension_ranges=extension_ranges,
+ file=file_desc,
+ serialized_start=None,
+ serialized_end=None,
+ syntax=syntax,
+ is_map_entry=desc_proto.options.map_entry,
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+ for nested in desc.nested_types:
+ nested.containing_type = desc
+ for enum in desc.enum_types:
+ enum.containing_type = desc
+ for field_index, field_desc in enumerate(desc_proto.field):
+ if field_desc.HasField('oneof_index'):
+ oneof_index = field_desc.oneof_index
+ oneofs[oneof_index].fields.append(fields[field_index])
+ fields[field_index].containing_oneof = oneofs[oneof_index]
+
+ scope[_PrefixWithDot(desc_name)] = desc
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
+ self._descriptors[desc_name] = desc
+ return desc
+
+ def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
+ containing_type=None, scope=None, top_level=False):
+ """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.
+
+ Args:
+ enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
+      package: Optional package name for the new EnumDescriptor.
+ file_desc: The file containing the enum descriptor.
+ containing_type: The type containing this enum.
+ scope: Scope containing available types.
+ top_level: If True, the enum is a top level symbol. If False, the enum
+ is defined inside a message.
+
+ Returns:
+      The added descriptor.
+ """
+
+ if package:
+ enum_name = '.'.join((package, enum_proto.name))
+ else:
+ enum_name = enum_proto.name
+
+ if file_desc is None:
+ file_name = None
+ else:
+ file_name = file_desc.name
+
+ values = [self._MakeEnumValueDescriptor(value, index)
+ for index, value in enumerate(enum_proto.value)]
+ desc = descriptor.EnumDescriptor(name=enum_proto.name,
+ full_name=enum_name,
+ filename=file_name,
+ file=file_desc,
+ values=values,
+ containing_type=containing_type,
+ options=_OptionsOrNone(enum_proto),
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+ scope['.%s' % enum_name] = desc
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
+ self._enum_descriptors[enum_name] = desc
+
+ # Add top level enum values.
+ if top_level:
+ for value in values:
+ full_name = _NormalizeFullyQualifiedName(
+ '.'.join((package, value.name)))
+ self._CheckConflictRegister(value, full_name, file_name)
+ self._top_enum_values[full_name] = value
+
+ return desc
+
+ def _MakeFieldDescriptor(self, field_proto, message_name, index,
+ file_desc, is_extension=False):
+ """Creates a field descriptor from a FieldDescriptorProto.
+
+ For message and enum type fields, this method will do a look up
+ in the pool for the appropriate descriptor for that type. If it
+ is unavailable, it will fall back to the _source function to
+ create it. If this type is still unavailable, construction will
+ fail.
+
+ Args:
+ field_proto: The proto describing the field.
+ message_name: The name of the containing message.
+ index: Index of the field
+ file_desc: The file containing the field descriptor.
+ is_extension: Indication that this field is for an extension.
+
+ Returns:
+ An initialized FieldDescriptor object
+ """
+
+ if message_name:
+ full_name = '.'.join((message_name, field_proto.name))
+ else:
+ full_name = field_proto.name
+
+ if field_proto.json_name:
+ json_name = field_proto.json_name
+ else:
+ json_name = None
+
+ return descriptor.FieldDescriptor(
+ name=field_proto.name,
+ full_name=full_name,
+ index=index,
+ number=field_proto.number,
+ type=field_proto.type,
+ cpp_type=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ label=field_proto.label,
+ has_default_value=False,
+ default_value=None,
+ is_extension=is_extension,
+ extension_scope=None,
+ options=_OptionsOrNone(field_proto),
+ json_name=json_name,
+ file=file_desc,
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+
+ def _SetAllFieldTypes(self, package, desc_proto, scope):
+ """Sets all the descriptor's fields's types.
+
+ This method also sets the containing types on any extensions.
+
+ Args:
+ package: The current package of desc_proto.
+ desc_proto: The message descriptor to update.
+ scope: Enclosing scope of available types.
+ """
+
+ package = _PrefixWithDot(package)
+
+ main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)
+
+ if package == '.':
+ nested_package = _PrefixWithDot(desc_proto.name)
+ else:
+ nested_package = '.'.join([package, desc_proto.name])
+
+ for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
+ self._SetFieldType(field_proto, field_desc, nested_package, scope)
+
+ for extension_proto, extension_desc in (
+ zip(desc_proto.extension, main_desc.extensions)):
+ extension_desc.containing_type = self._GetTypeFromScope(
+ nested_package, extension_proto.extendee, scope)
+ self._SetFieldType(extension_proto, extension_desc, nested_package, scope)
+
+ for nested_type in desc_proto.nested_type:
+ self._SetAllFieldTypes(nested_package, nested_type, scope)
+
+ def _SetFieldType(self, field_proto, field_desc, package, scope):
+ """Sets the field's type, cpp_type, message_type and enum_type.
+
+ Args:
+ field_proto: Data about the field in proto format.
+ field_desc: The descriptor to modify.
+ package: The package the field's container is in.
+ scope: Enclosing scope of available types.
+ """
+ if field_proto.type_name:
+ desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
+ else:
+ desc = None
+
+ if not field_proto.HasField('type'):
+ if isinstance(desc, descriptor.Descriptor):
+ field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
+ else:
+ field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM
+
+ field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
+ field_proto.type)
+
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
+ or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
+ field_desc.message_type = desc
+
+ if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
+ field_desc.enum_type = desc
+
+ if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+ field_desc.has_default_value = False
+ field_desc.default_value = []
+ elif field_proto.HasField('default_value'):
+ field_desc.has_default_value = True
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
+ field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
+ field_desc.default_value = float(field_proto.default_value)
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
+ field_desc.default_value = field_proto.default_value
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
+ field_desc.default_value = field_proto.default_value.lower() == 'true'
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
+ field_desc.default_value = field_desc.enum_type.values_by_name[
+ field_proto.default_value].number
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
+ field_desc.default_value = text_encoding.CUnescape(
+ field_proto.default_value)
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
+ field_desc.default_value = None
+ else:
+ # All other types are of the "int" type.
+ field_desc.default_value = int(field_proto.default_value)
+ else:
+ field_desc.has_default_value = False
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
+ field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
+ field_desc.default_value = 0.0
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
+ field_desc.default_value = u''
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
+ field_desc.default_value = False
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
+ field_desc.default_value = field_desc.enum_type.values[0].number
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
+ field_desc.default_value = b''
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
+ field_desc.default_value = None
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP:
+ field_desc.default_value = None
+ else:
+ # All other types are of the "int" type.
+ field_desc.default_value = 0
+
+ field_desc.type = field_proto.type
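+
+  # Worked example (illustrative): for a proto2 field declared as
+  #   optional int32 foo = 1 [default = 7];
+  # field_proto.default_value is the string '7', so the final "int" branch
+  # above yields default_value == 7 with has_default_value == True. With no
+  # explicit default, the same field takes the second branch and gets 0 with
+  # has_default_value == False.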
+
+ def _MakeEnumValueDescriptor(self, value_proto, index):
+ """Creates a enum value descriptor object from a enum value proto.
+
+ Args:
+ value_proto: The proto describing the enum value.
+ index: The index of the enum value.
+
+ Returns:
+ An initialized EnumValueDescriptor object.
+ """
+
+ return descriptor.EnumValueDescriptor(
+ name=value_proto.name,
+ index=index,
+ number=value_proto.number,
+ options=_OptionsOrNone(value_proto),
+ type=None,
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+
+ def _MakeServiceDescriptor(self, service_proto, service_index, scope,
+ package, file_desc):
+ """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto.
+
+ Args:
+ service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message.
+ service_index: The index of the service in the File.
+ scope: Dict mapping short and full symbols to message and enum types.
+      package: Optional package name for the new ServiceDescriptor.
+ file_desc: The file containing the service descriptor.
+
+ Returns:
+ The added descriptor.
+ """
+
+ if package:
+ service_name = '.'.join((package, service_proto.name))
+ else:
+ service_name = service_proto.name
+
+ methods = [self._MakeMethodDescriptor(method_proto, service_name, package,
+ scope, index)
+ for index, method_proto in enumerate(service_proto.method)]
+ desc = descriptor.ServiceDescriptor(
+ name=service_proto.name,
+ full_name=service_name,
+ index=service_index,
+ methods=methods,
+ options=_OptionsOrNone(service_proto),
+ file=file_desc,
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
+ self._service_descriptors[service_name] = desc
+ return desc
+
+ def _MakeMethodDescriptor(self, method_proto, service_name, package, scope,
+ index):
+ """Creates a method descriptor from a MethodDescriptorProto.
+
+ Args:
+ method_proto: The proto describing the method.
+ service_name: The name of the containing service.
+ package: Optional package name to look up for types.
+ scope: Scope containing available types.
+ index: Index of the method in the service.
+
+ Returns:
+ An initialized MethodDescriptor object.
+ """
+ full_name = '.'.join((service_name, method_proto.name))
+ input_type = self._GetTypeFromScope(
+ package, method_proto.input_type, scope)
+ output_type = self._GetTypeFromScope(
+ package, method_proto.output_type, scope)
+ return descriptor.MethodDescriptor(
+ name=method_proto.name,
+ full_name=full_name,
+ index=index,
+ containing_service=None,
+ input_type=input_type,
+ output_type=output_type,
+ client_streaming=method_proto.client_streaming,
+ server_streaming=method_proto.server_streaming,
+ options=_OptionsOrNone(method_proto),
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+
+ def _ExtractSymbols(self, descriptors):
+ """Pulls out all the symbols from descriptor protos.
+
+ Args:
+      descriptors: The messages to extract descriptors from.
+
+    Yields:
+ A two element tuple of the type name and descriptor object.
+ """
+
+ for desc in descriptors:
+ yield (_PrefixWithDot(desc.full_name), desc)
+ for symbol in self._ExtractSymbols(desc.nested_types):
+ yield symbol
+ for enum in desc.enum_types:
+ yield (_PrefixWithDot(enum.full_name), enum)
+
+ def _GetDeps(self, dependencies, visited=None):
+ """Recursively finds dependencies for file protos.
+
+ Args:
+ dependencies: The names of the files being depended on.
+ visited: The names of files already found.
+
+ Yields:
+ Each direct and indirect dependency.
+ """
+
+ visited = visited or set()
+ for dependency in dependencies:
+ if dependency not in visited:
+ visited.add(dependency)
+ dep_desc = self.FindFileByName(dependency)
+ yield dep_desc
+ public_files = [d.name for d in dep_desc.public_dependencies]
+ yield from self._GetDeps(public_files, visited)
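+
+  # Illustrative: if a.proto imports b.proto and b.proto publicly imports
+  # c.proto, _GetDeps(['b.proto']) yields the descriptors for b.proto and
+  # c.proto, visiting each file at most once.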
+
+ def _GetTypeFromScope(self, package, type_name, scope):
+ """Finds a given type name in the current scope.
+
+ Args:
+ package: The package the proto should be located in.
+ type_name: The name of the type to be found in the scope.
+ scope: Dict mapping short and full symbols to message and enum types.
+
+ Returns:
+ The descriptor for the requested type.
+ """
+ if type_name not in scope:
+ components = _PrefixWithDot(package).split('.')
+ while components:
+ possible_match = '.'.join(components + [type_name])
+ if possible_match in scope:
+ type_name = possible_match
+ break
+ else:
+ components.pop(-1)
+ return scope[type_name]
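+
+  # Worked example (illustrative names): resolving type_name 'Bar' from
+  # package 'foo.baz' tries '.foo.baz.Bar', then '.foo.Bar', then '.Bar',
+  # and returns the first match present in scope.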
+
+
+def _PrefixWithDot(name):
+ return name if name.startswith('.') else '.%s' % name
+
+
+if _USE_C_DESCRIPTORS:
+ # TODO: This pool could be constructed from Python code, when we
+ # support a flag like 'use_cpp_generated_pool=True'.
+ # pylint: disable=protected-access
+ _DEFAULT = descriptor._message.default_pool
+else:
+ _DEFAULT = DescriptorPool()
+
+
+def Default():
+ return _DEFAULT
diff --git a/Lib/site-packages/google/protobuf/duration_pb2.py b/Lib/site-packages/google/protobuf/duration_pb2.py
new file mode 100644
index 0000000..d573dfe
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/duration_pb2.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/duration.proto
+# Protobuf Python Version: 4.25.2
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\":\n\x08\x44uration\x12\x18\n\x07seconds\x18\x01 \x01(\x03R\x07seconds\x12\x14\n\x05nanos\x18\x02 \x01(\x05R\x05nanosB\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _globals['DESCRIPTOR']._options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _globals['_DURATION']._serialized_start=51
+ _globals['_DURATION']._serialized_end=109
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/protobuf/empty_pb2.py b/Lib/site-packages/google/protobuf/empty_pb2.py
new file mode 100644
index 0000000..d2927ab
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/empty_pb2.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/empty.proto
+# Protobuf Python Version: 4.25.2
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _globals['DESCRIPTOR']._options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _globals['_EMPTY']._serialized_start=48
+ _globals['_EMPTY']._serialized_end=55
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/protobuf/field_mask_pb2.py b/Lib/site-packages/google/protobuf/field_mask_pb2.py
new file mode 100644
index 0000000..73d9f5a
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/field_mask_pb2.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/field_mask.proto
+# Protobuf Python Version: 4.25.2
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"!\n\tFieldMask\x12\x14\n\x05paths\x18\x01 \x03(\tR\x05pathsB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _globals['DESCRIPTOR']._options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _globals['_FIELDMASK']._serialized_start=53
+ _globals['_FIELDMASK']._serialized_end=86
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/protobuf/internal/__init__.py b/Lib/site-packages/google/protobuf/internal/__init__.py
new file mode 100644
index 0000000..e676e28
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/__init__.py
@@ -0,0 +1,7 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
diff --git a/Lib/site-packages/google/protobuf/internal/_parameterized.py b/Lib/site-packages/google/protobuf/internal/_parameterized.py
new file mode 100644
index 0000000..4cb2cb1
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/_parameterized.py
@@ -0,0 +1,420 @@
+#! /usr/bin/env python
+#
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Adds support for parameterized tests to Python's unittest TestCase class.
+
+A parameterized test is a method in a test case that is invoked with different
+argument tuples.
+
+A simple example:
+
+ class AdditionExample(_parameterized.TestCase):
+ @_parameterized.parameters(
+ (1, 2, 3),
+ (4, 5, 9),
+ (1, 1, 3))
+ def testAddition(self, op1, op2, result):
+ self.assertEqual(result, op1 + op2)
+
+
+Each invocation is a separate test case and properly isolated just
+like a normal test method, with its own setUp/tearDown cycle. In the
+example above, there are three separate testcases, one of which will
+fail due to an assertion error (1 + 1 != 3).
+
+Parameters for individual test cases can be tuples (with positional parameters)
+or dictionaries (with named parameters):
+
+ class AdditionExample(_parameterized.TestCase):
+ @_parameterized.parameters(
+ {'op1': 1, 'op2': 2, 'result': 3},
+ {'op1': 4, 'op2': 5, 'result': 9},
+ )
+ def testAddition(self, op1, op2, result):
+ self.assertEqual(result, op1 + op2)
+
+If a parameterized test fails, the error message will show the
+original test name (which is modified internally) and the arguments
+for the specific invocation, which are part of the string returned by
+the shortDescription() method on test cases.
+
+The id method of the test, used internally by the unittest framework,
+is also modified to show the arguments. To make sure that test names
+stay the same across several invocations, object representations like
+
+ >>> class Foo(object):
+ ... pass
+ >>> repr(Foo())
+ '<__main__.Foo object at 0x23d8610>'
+
+are turned into '<__main__.Foo>'. For even more descriptive names,
+especially in test logs, you can use the named_parameters decorator. In
+this case, only tuples are supported, and the first parameter has to
+be a string (or an object that returns an apt name when converted via
+str()):
+
+ class NamedExample(_parameterized.TestCase):
+ @_parameterized.named_parameters(
+ ('Normal', 'aa', 'aaa', True),
+ ('EmptyPrefix', '', 'abc', True),
+ ('BothEmpty', '', '', True))
+ def testStartsWith(self, prefix, string, result):
+      self.assertEqual(result, string.startswith(prefix))
+
+Named tests also have the benefit that they can be run individually
+from the command line:
+
+ $ testmodule.py NamedExample.testStartsWithNormal
+ .
+ --------------------------------------------------------------------
+ Ran 1 test in 0.000s
+
+ OK
+
+Parameterized Classes
+=====================
+If invocation arguments are shared across test methods in a single
+TestCase class, instead of decorating all test methods
+individually, the class itself can be decorated:
+
+ @_parameterized.parameters(
+    (1, 2, 3),
+ (4, 5, 9))
+ class ArithmeticTest(_parameterized.TestCase):
+ def testAdd(self, arg1, arg2, result):
+ self.assertEqual(arg1 + arg2, result)
+
+    def testSubtract(self, arg1, arg2, result):
+ self.assertEqual(result - arg1, arg2)
+
+Inputs from Iterables
+=====================
+If parameters should be shared across several test cases, or are dynamically
+created from other sources, a single non-tuple iterable can be passed into
+the decorator. This iterable will be used to obtain the test cases:
+
+ class AdditionExample(_parameterized.TestCase):
+ @_parameterized.parameters(
+      (c.op1, c.op2, c.result) for c in testcases
+ )
+ def testAddition(self, op1, op2, result):
+ self.assertEqual(result, op1 + op2)
+
+
+Single-Argument Test Methods
+============================
+If a test method takes only one argument, the single argument does not need to
+be wrapped into a tuple:
+
+ class NegativeNumberExample(_parameterized.TestCase):
+ @_parameterized.parameters(
+ -1, -3, -4, -5
+ )
+ def testIsNegative(self, arg):
+ self.assertTrue(IsNegative(arg))
+"""
+
+__author__ = 'tmarek@google.com (Torsten Marek)'
+
+import functools
+import re
+import types
+import unittest
+import uuid
+
+try:
+  # Available since Python 3.3.
+ import collections.abc as collections_abc
+except ImportError:
+  # The top-level collections aliases were removed in Python 3.10.
+ import collections as collections_abc
+
+ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
+_SEPARATOR = uuid.uuid1().hex
+_FIRST_ARG = object()
+_ARGUMENT_REPR = object()
+
+
+def _CleanRepr(obj):
+ return ADDR_RE.sub(r'<\1>', repr(obj))
+
+
+# Helper function formerly from the unittest module, removed from it in
+# Python 2.7.
+def _StrClass(cls):
+ return '%s.%s' % (cls.__module__, cls.__name__)
+
+
+def _NonStringIterable(obj):
+ return (isinstance(obj, collections_abc.Iterable) and
+ not isinstance(obj, str))
+
+
+def _FormatParameterList(testcase_params):
+ if isinstance(testcase_params, collections_abc.Mapping):
+ return ', '.join('%s=%s' % (argname, _CleanRepr(value))
+ for argname, value in testcase_params.items())
+ elif _NonStringIterable(testcase_params):
+ return ', '.join(map(_CleanRepr, testcase_params))
+ else:
+ return _FormatParameterList((testcase_params,))
+
+
+class _ParameterizedTestIter(object):
+ """Callable and iterable class for producing new test cases."""
+
+ def __init__(self, test_method, testcases, naming_type):
+ """Returns concrete test functions for a test and a list of parameters.
+
+ The naming_type is used to determine the name of the concrete
+ functions as reported by the unittest framework. If naming_type is
+ _FIRST_ARG, the testcases must be tuples, and the first element must
+ have a string representation that is a valid Python identifier.
+
+ Args:
+ test_method: The decorated test method.
+ testcases: (list of tuple/dict) A list of parameter
+ tuples/dicts for individual test invocations.
+      naming_type: The test naming type, either _FIRST_ARG or _ARGUMENT_REPR.
+ """
+ self._test_method = test_method
+ self.testcases = testcases
+ self._naming_type = naming_type
+
+ def __call__(self, *args, **kwargs):
+ raise RuntimeError('You appear to be running a parameterized test case '
+ 'without having inherited from parameterized.'
+ 'TestCase. This is bad because none of '
+ 'your test cases are actually being run.')
+
+ def __iter__(self):
+ test_method = self._test_method
+ naming_type = self._naming_type
+
+ def MakeBoundParamTest(testcase_params):
+ @functools.wraps(test_method)
+ def BoundParamTest(self):
+ if isinstance(testcase_params, collections_abc.Mapping):
+ test_method(self, **testcase_params)
+ elif _NonStringIterable(testcase_params):
+ test_method(self, *testcase_params)
+ else:
+ test_method(self, testcase_params)
+
+ if naming_type is _FIRST_ARG:
+ # Signal the metaclass that the name of the test function is unique
+ # and descriptive.
+ BoundParamTest.__x_use_name__ = True
+ BoundParamTest.__name__ += str(testcase_params[0])
+ testcase_params = testcase_params[1:]
+ elif naming_type is _ARGUMENT_REPR:
+ # __x_extra_id__ is used to pass naming information to the __new__
+ # method of TestGeneratorMetaclass.
+ # The metaclass will make sure to create a unique, but nondescriptive
+ # name for this test.
+ BoundParamTest.__x_extra_id__ = '(%s)' % (
+ _FormatParameterList(testcase_params),)
+ else:
+ raise RuntimeError('%s is not a valid naming type.' % (naming_type,))
+
+ BoundParamTest.__doc__ = '%s(%s)' % (
+ BoundParamTest.__name__, _FormatParameterList(testcase_params))
+ if test_method.__doc__:
+ BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,)
+ return BoundParamTest
+ return (MakeBoundParamTest(c) for c in self.testcases)
+
+
+def _IsSingletonList(testcases):
+ """True iff testcases contains only a single non-tuple element."""
+ return len(testcases) == 1 and not isinstance(testcases[0], tuple)
+
+
+def _ModifyClass(class_object, testcases, naming_type):
+ assert not getattr(class_object, '_id_suffix', None), (
+ 'Cannot add parameters to %s,'
+ ' which already has parameterized methods.' % (class_object,))
+ class_object._id_suffix = id_suffix = {}
+ # We change the size of __dict__ while we iterate over it,
+ # which Python 3.x will complain about, so use copy().
+ for name, obj in class_object.__dict__.copy().items():
+ if (name.startswith(unittest.TestLoader.testMethodPrefix)
+ and isinstance(obj, types.FunctionType)):
+ delattr(class_object, name)
+ methods = {}
+ _UpdateClassDictForParamTestCase(
+ methods, id_suffix, name,
+ _ParameterizedTestIter(obj, testcases, naming_type))
+ for name, meth in methods.items():
+ setattr(class_object, name, meth)
+
+
+def _ParameterDecorator(naming_type, testcases):
+ """Implementation of the parameterization decorators.
+
+ Args:
+ naming_type: The naming type.
+ testcases: Testcase parameters.
+
+ Returns:
+ A function for modifying the decorated object.
+ """
+ def _Apply(obj):
+ if isinstance(obj, type):
+ _ModifyClass(
+ obj,
+ list(testcases) if not isinstance(testcases, collections_abc.Sequence)
+ else testcases,
+ naming_type)
+ return obj
+ else:
+ return _ParameterizedTestIter(obj, testcases, naming_type)
+
+ if _IsSingletonList(testcases):
+ assert _NonStringIterable(testcases[0]), (
+ 'Single parameter argument must be a non-string iterable')
+ testcases = testcases[0]
+
+ return _Apply
+
+
+def parameters(*testcases): # pylint: disable=invalid-name
+ """A decorator for creating parameterized tests.
+
+ See the module docstring for a usage example.
+ Args:
+ *testcases: Parameters for the decorated method, either a single
+ iterable, or a list of tuples/dicts/objects (for tests
+ with only one argument).
+
+ Returns:
+ A test generator to be handled by TestGeneratorMetaclass.
+ """
+ return _ParameterDecorator(_ARGUMENT_REPR, testcases)
+
+
+def named_parameters(*testcases): # pylint: disable=invalid-name
+ """A decorator for creating parameterized tests.
+
+ See the module docstring for a usage example. The first element of
+ each parameter tuple should be a string and will be appended to the
+ name of the test method.
+
+ Args:
+ *testcases: Parameters for the decorated method, either a single
+ iterable, or a list of tuples.
+
+ Returns:
+ A test generator to be handled by TestGeneratorMetaclass.
+ """
+ return _ParameterDecorator(_FIRST_ARG, testcases)
+
+
+class TestGeneratorMetaclass(type):
+ """Metaclass for test cases with test generators.
+
+ A test generator is an iterable in a testcase that produces callables. These
+ callables must be single-argument methods. These methods are injected into
+ the class namespace and the original iterable is removed. If the name of the
+ iterable conforms to the test pattern, the injected methods will be picked
+ up as tests by the unittest framework.
+
+ In general, it is supposed to be used in conjunction with the
+ parameters decorator.
+ """
+
+ def __new__(mcs, class_name, bases, dct):
+ dct['_id_suffix'] = id_suffix = {}
+ for name, obj in dct.copy().items():
+ if (name.startswith(unittest.TestLoader.testMethodPrefix) and
+ _NonStringIterable(obj)):
+ iterator = iter(obj)
+ dct.pop(name)
+ _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator)
+
+ return type.__new__(mcs, class_name, bases, dct)
+
+
+def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator):
+ """Adds individual test cases to a dictionary.
+
+ Args:
+ dct: The target dictionary.
+ id_suffix: The dictionary for mapping names to test IDs.
+ name: The original name of the test case.
+ iterator: The iterator generating the individual test cases.
+ """
+ for idx, func in enumerate(iterator):
+ assert callable(func), 'Test generators must yield callables, got %r' % (
+ func,)
+ if getattr(func, '__x_use_name__', False):
+ new_name = func.__name__
+ else:
+ new_name = '%s%s%d' % (name, _SEPARATOR, idx)
+ assert new_name not in dct, (
+ 'Name of parameterized test case "%s" not unique' % (new_name,))
+ dct[new_name] = func
+ id_suffix[new_name] = getattr(func, '__x_extra_id__', '')
+
+
+class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass):
+ """Base class for test cases using the parameters decorator."""
+
+ def _OriginalName(self):
+ return self._testMethodName.split(_SEPARATOR)[0]
+
+ def __str__(self):
+ return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__))
+
+ def id(self): # pylint: disable=invalid-name
+ """Returns the descriptive ID of the test.
+
+ This is used internally by the unittesting framework to get a name
+ for the test to be used in reports.
+
+ Returns:
+ The test id.
+ """
+ return '%s.%s%s' % (_StrClass(self.__class__),
+ self._OriginalName(),
+ self._id_suffix.get(self._testMethodName, ''))
+
+
+def CoopTestCase(other_base_class):
+ """Returns a new base class with a cooperative metaclass base.
+
+ This enables the TestCase to be used in combination
+ with other base classes that have custom metaclasses, such as
+ mox.MoxTestBase.
+
+ Only works with metaclasses that do not override type.__new__.
+
+ Example:
+
+ import google3
+ import mox
+
+ from google.protobuf.internal import _parameterized
+
+ class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)):
+ ...
+
+ Args:
+ other_base_class: (class) A test case base class.
+
+ Returns:
+ A new class object.
+ """
+  # type(other_base_class) retrieves the metaclass on Python 3, where the
+  # legacy __metaclass__ attribute is not set automatically.
+  metaclass = type(
+      'CoopMetaclass',
+      (type(other_base_class),
+       TestGeneratorMetaclass), {})
+ return metaclass(
+ 'CoopTestCase',
+ (other_base_class, TestCase), {})
diff --git a/Lib/site-packages/google/protobuf/internal/api_implementation.py b/Lib/site-packages/google/protobuf/internal/api_implementation.py
new file mode 100644
index 0000000..65caf63
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/api_implementation.py
@@ -0,0 +1,140 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Determine which implementation of the protobuf API is used in this process.
+"""
+
+import importlib
+import os
+import sys
+import warnings
+
+
+def _ApiVersionToImplementationType(api_version):
+ if api_version == 2:
+ return 'cpp'
+ if api_version == 1:
+ raise ValueError('api_version=1 is no longer supported.')
+ if api_version == 0:
+ return 'python'
+ return None
+
+
+_implementation_type = None
+try:
+ # pylint: disable=g-import-not-at-top
+ from google.protobuf.internal import _api_implementation
+ # The compile-time constants in the _api_implementation module can be used to
+ # switch to a certain implementation of the Python API at build time.
+ _implementation_type = _ApiVersionToImplementationType(
+ _api_implementation.api_version)
+except ImportError:
+ pass # Unspecified by compiler flags.
+
+
+def _CanImport(mod_name):
+ try:
+ mod = importlib.import_module(mod_name)
+ # Work around a known issue in the classic bootstrap .par import hook.
+ if not mod:
+ raise ImportError(mod_name + ' import succeeded but was None')
+ return True
+ except ImportError:
+ return False
+
+
+if _implementation_type is None:
+ if _CanImport('google._upb._message'):
+ _implementation_type = 'upb'
+ elif _CanImport('google.protobuf.pyext._message'):
+ _implementation_type = 'cpp'
+ else:
+ _implementation_type = 'python'
+
+
+# This environment variable can be used to switch to a certain implementation
+# of the Python API, overriding the compile-time constants in the
+# _api_implementation module. Right now only 'python', 'cpp' and 'upb' are
+# valid values. Any other value will raise an error.
+_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
+ _implementation_type)
+
+if _implementation_type not in ('python', 'cpp', 'upb'):
+ raise ValueError('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION {0} is not '
+ 'supported. Please set to \'python\', \'cpp\' or '
+ '\'upb\'.'.format(_implementation_type))
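+
+# Example (illustrative): the implementation can be forced from the
+# environment before any protobuf modules are imported, e.g.
+#   PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python python my_script.py
+# where my_script.py stands in for your entry point.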
+
+if 'PyPy' in sys.version and _implementation_type == 'cpp':
+ warnings.warn('PyPy does not work yet with cpp protocol buffers. '
+ 'Falling back to the python implementation.')
+ _implementation_type = 'python'
+
+_c_module = None
+
+if _implementation_type == 'cpp':
+ try:
+ # pylint: disable=g-import-not-at-top
+ from google.protobuf.pyext import _message
+ sys.modules['google3.net.proto2.python.internal.cpp._message'] = _message
+ _c_module = _message
+ del _message
+ except ImportError:
+    # TODO: fall back to python
+    warnings.warn(
+        'Selected implementation cpp is not available.')
+
+if _implementation_type == 'upb':
+ try:
+ # pylint: disable=g-import-not-at-top
+ from google._upb import _message
+ _c_module = _message
+ del _message
+ except ImportError:
+ warnings.warn('Selected implementation upb is not available. '
+ 'Falling back to the python implementation.')
+ _implementation_type = 'python'
+
+# Detect if serialization should be deterministic by default
+try:
+ # The presence of this module in a build allows the proto implementation to
+ # be upgraded merely via build deps.
+ #
+ # NOTE: Merely importing this automatically enables deterministic proto
+ # serialization for C++ code, but we still need to export it as a boolean so
+ # that we can do the same for `_implementation_type == 'python'`.
+ #
+ # NOTE2: It is possible for C++ code to enable deterministic serialization by
+ # default _without_ affecting Python code, if the C++ implementation is not in
+ # use by this module. That is intended behavior, so we don't actually expose
+ # this boolean outside of this module.
+ #
+ # pylint: disable=g-import-not-at-top,unused-import
+ from google.protobuf import enable_deterministic_proto_serialization
+ _python_deterministic_proto_serialization = True
+except ImportError:
+ _python_deterministic_proto_serialization = False
+
+
+# Usage of this function is discouraged. Clients shouldn't care which
+# implementation of the API is in use. Note that there is no guarantee
+# that differences between APIs will be maintained.
+# Please don't use this function if possible.
+def Type():
+ return _implementation_type
+
+
+# See comment on 'Type' above.
+# TODO: Remove the API, it returns a constant. b/228102101
+def Version():
+ return 2
+
+
+# For internal use only
+def IsPythonDefaultSerializationDeterministic():
+ return _python_deterministic_proto_serialization
diff --git a/Lib/site-packages/google/protobuf/internal/builder.py b/Lib/site-packages/google/protobuf/internal/builder.py
new file mode 100644
index 0000000..c22d994
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/builder.py
@@ -0,0 +1,118 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Builds descriptors, message classes and services for generated _pb2.py.
+
+This file is only called in python generated _pb2.py files. It builds
+descriptors, message classes and services that users can directly use
+in generated code.
+"""
+
+__author__ = 'jieluo@google.com (Jie Luo)'
+
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf.internal import python_message
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+_sym_db = _symbol_database.Default()
+
+
+def BuildMessageAndEnumDescriptors(file_des, module):
+ """Builds message and enum descriptors.
+
+ Args:
+ file_des: FileDescriptor of the .proto file
+ module: Generated _pb2 module
+ """
+
+ def BuildNestedDescriptors(msg_des, prefix):
+ for (name, nested_msg) in msg_des.nested_types_by_name.items():
+ module_name = prefix + name.upper()
+ module[module_name] = nested_msg
+ BuildNestedDescriptors(nested_msg, module_name + '_')
+ for enum_des in msg_des.enum_types:
+ module[prefix + enum_des.name.upper()] = enum_des
+
+ for (name, msg_des) in file_des.message_types_by_name.items():
+ module_name = '_' + name.upper()
+ module[module_name] = msg_des
+ BuildNestedDescriptors(msg_des, module_name + '_')
+
+
+def BuildTopDescriptorsAndMessages(file_des, module_name, module):
+ """Builds top level descriptors and message classes.
+
+ Args:
+ file_des: FileDescriptor of the .proto file
+ module_name: str, the name of generated _pb2 module
+ module: Generated _pb2 module
+ """
+
+ def BuildMessage(msg_des):
+ create_dict = {}
+ for (name, nested_msg) in msg_des.nested_types_by_name.items():
+ create_dict[name] = BuildMessage(nested_msg)
+ create_dict['DESCRIPTOR'] = msg_des
+ create_dict['__module__'] = module_name
+ message_class = _reflection.GeneratedProtocolMessageType(
+ msg_des.name, (_message.Message,), create_dict)
+ _sym_db.RegisterMessage(message_class)
+ return message_class
+
+ # top level enums
+ for (name, enum_des) in file_des.enum_types_by_name.items():
+ module['_' + name.upper()] = enum_des
+ module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des)
+ for enum_value in enum_des.values:
+ module[enum_value.name] = enum_value.number
+
+ # top level extensions
+ for (name, extension_des) in file_des.extensions_by_name.items():
+ module[name.upper() + '_FIELD_NUMBER'] = extension_des.number
+ module[name] = extension_des
+
+ # services
+ for (name, service) in file_des.services_by_name.items():
+ module['_' + name.upper()] = service
+
+ # Build messages.
+ for (name, msg_des) in file_des.message_types_by_name.items():
+ module[name] = BuildMessage(msg_des)
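+
+# Typical call sequence in a generated _pb2 module (as emitted by protoc;
+# the module name is illustrative):
+#   _globals = globals()
+#   _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+#   _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'my_pb2', _globals)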
+
+
+def AddHelpersToExtensions(file_des):
+ """no-op to keep old generated code work with new runtime.
+
+ Args:
+ file_des: FileDescriptor of the .proto file
+ """
+  # TODO: Remove this no-op
+ return
+
+
+def BuildServices(file_des, module_name, module):
+ """Builds services classes and services stub class.
+
+ Args:
+ file_des: FileDescriptor of the .proto file
+ module_name: str, the name of generated _pb2 module
+ module: Generated _pb2 module
+ """
+ # pylint: disable=g-import-not-at-top
+ from google.protobuf import service as _service
+ from google.protobuf import service_reflection
+ # pylint: enable=g-import-not-at-top
+ for (name, service) in file_des.services_by_name.items():
+ module[name] = service_reflection.GeneratedServiceType(
+ name, (_service.Service,),
+ dict(DESCRIPTOR=service, __module__=module_name))
+ stub_name = name + '_Stub'
+ module[stub_name] = service_reflection.GeneratedServiceStubType(
+ stub_name, (module[name],),
+ dict(DESCRIPTOR=service, __module__=module_name))
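+
+# Sketch (illustrative): a _pb2 module generated with generic services would
+# invoke this as
+#   _builder.BuildServices(DESCRIPTOR, 'my_pb2', _globals)
+# producing both a MyService class and a MyService_Stub class in the module.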
diff --git a/Lib/site-packages/google/protobuf/internal/containers.py b/Lib/site-packages/google/protobuf/internal/containers.py
new file mode 100644
index 0000000..20c6d98
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/containers.py
@@ -0,0 +1,687 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Contains container classes to represent different protocol buffer types.
+
+This file defines container classes which represent categories of protocol
+buffer field types which need extra maintenance. Currently these categories
+are:
+
+- Repeated scalar fields - These are all repeated fields which aren't
+ composite (e.g. they are of simple types like int32, string, etc).
+- Repeated composite fields - Repeated fields which are composite. This
+ includes groups and nested messages.
+"""
+
+import collections.abc
+import copy
+import pickle
+import warnings
+from typing import (
+ Any,
+ Iterable,
+ Iterator,
+ List,
+ MutableMapping,
+ MutableSequence,
+ NoReturn,
+ Optional,
+ Sequence,
+ TypeVar,
+ Union,
+ overload,
+)
+
+
+_T = TypeVar('_T')
+_K = TypeVar('_K')
+_V = TypeVar('_V')
+
+
+class BaseContainer(Sequence[_T]):
+ """Base container class."""
+
+ # Minimizes memory usage and disallows assignment to other attributes.
+ __slots__ = ['_message_listener', '_values']
+
+ def __init__(self, message_listener: Any) -> None:
+ """
+ Args:
+ message_listener: A MessageListener implementation.
+ The RepeatedScalarFieldContainer will call this object's
+ Modified() method when it is modified.
+ """
+ self._message_listener = message_listener
+ self._values = []
+
+ @overload
+ def __getitem__(self, key: int) -> _T:
+ ...
+
+ @overload
+ def __getitem__(self, key: slice) -> List[_T]:
+ ...
+
+ def __getitem__(self, key):
+ """Retrieves item by the specified key."""
+ return self._values[key]
+
+ def __len__(self) -> int:
+ """Returns the number of elements in the container."""
+ return len(self._values)
+
+ def __ne__(self, other: Any) -> bool:
+ """Checks if another instance isn't equal to this one."""
+ # The concrete classes should define __eq__.
+ return not self == other
+
+ __hash__ = None
+
+ def __repr__(self) -> str:
+ return repr(self._values)
+
+ def sort(self, *args, **kwargs) -> None:
+ # Continue to support the old sort_function keyword argument.
+ # This is expected to be a rare occurrence, so use LBYL to avoid
+ # the overhead of actually catching KeyError.
+ if 'sort_function' in kwargs:
+ kwargs['cmp'] = kwargs.pop('sort_function')
+ self._values.sort(*args, **kwargs)
+
+ def reverse(self) -> None:
+ self._values.reverse()
+
+
+# TODO: Remove this. BaseContainer does *not* conform to
+# MutableSequence, only its subclasses do.
+collections.abc.MutableSequence.register(BaseContainer)
+
+
+class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]):
+ """Simple, type-checked, list-like container for holding repeated scalars."""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_type_checker']
+
+ def __init__(
+ self,
+ message_listener: Any,
+ type_checker: Any,
+ ) -> None:
+ """Args:
+
+ message_listener: A MessageListener implementation. The
+ RepeatedScalarFieldContainer will call this object's Modified() method
+ when it is modified.
+ type_checker: A type_checkers.ValueChecker instance to run on elements
+ inserted into this container.
+ """
+ super().__init__(message_listener)
+ self._type_checker = type_checker
+
+ def append(self, value: _T) -> None:
+ """Appends an item to the list. Similar to list.append()."""
+ self._values.append(self._type_checker.CheckValue(value))
+ if not self._message_listener.dirty:
+ self._message_listener.Modified()
+
+ def insert(self, key: int, value: _T) -> None:
+ """Inserts the item at the specified position. Similar to list.insert()."""
+ self._values.insert(key, self._type_checker.CheckValue(value))
+ if not self._message_listener.dirty:
+ self._message_listener.Modified()
+
+ def extend(self, elem_seq: Iterable[_T]) -> None:
+ """Extends by appending the given iterable. Similar to list.extend()."""
+    # TODO: Change OSS to raise error too
+ if elem_seq is None:
+ return
+ try:
+ elem_seq_iter = iter(elem_seq)
+ except TypeError:
+ if not elem_seq:
+ warnings.warn('Value is not iterable. Please remove the wrong '
+ 'usage. This will be changed to raise TypeError soon.')
+ return
+ raise
+ new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
+ if new_values:
+ self._values.extend(new_values)
+ self._message_listener.Modified()
+
+ def MergeFrom(
+ self,
+ other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]],
+ ) -> None:
+ """Appends the contents of another repeated field of the same type to this
+ one. We do not check the types of the individual fields.
+ """
+ self._values.extend(other)
+ self._message_listener.Modified()
+
+ def remove(self, elem: _T):
+ """Removes an item from the list. Similar to list.remove()."""
+ self._values.remove(elem)
+ self._message_listener.Modified()
+
+ def pop(self, key: Optional[int] = -1) -> _T:
+ """Removes and returns an item at a given index. Similar to list.pop()."""
+ value = self._values[key]
+ self.__delitem__(key)
+ return value
+
+ @overload
+ def __setitem__(self, key: int, value: _T) -> None:
+ ...
+
+ @overload
+ def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
+ ...
+
+ def __setitem__(self, key, value) -> None:
+ """Sets the item on the specified position."""
+ if isinstance(key, slice):
+ if key.step is not None:
+ raise ValueError('Extended slices not supported')
+ self._values[key] = map(self._type_checker.CheckValue, value)
+ self._message_listener.Modified()
+ else:
+ self._values[key] = self._type_checker.CheckValue(value)
+ self._message_listener.Modified()
+
+ def __delitem__(self, key: Union[int, slice]) -> None:
+ """Deletes the item at the specified position."""
+ del self._values[key]
+ self._message_listener.Modified()
+
+ def __eq__(self, other: Any) -> bool:
+ """Compares the current instance with another one."""
+ if self is other:
+ return True
+ # Special case for the same type which should be common and fast.
+ if isinstance(other, self.__class__):
+ return other._values == self._values
+ # We are presumably comparing against some other sequence type.
+ return other == self._values
+
+ def __deepcopy__(
+ self,
+ unused_memo: Any = None,
+ ) -> 'RepeatedScalarFieldContainer[_T]':
+ clone = RepeatedScalarFieldContainer(
+ copy.deepcopy(self._message_listener), self._type_checker)
+ clone.MergeFrom(self)
+ return clone
+
+ def __reduce__(self, **kwargs) -> NoReturn:
+ raise pickle.PickleError(
+ "Can't pickle repeated scalar fields, convert to list first")
+
+
+# TODO: Constrain T to be a subtype of Message.
+class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]):
+ """Simple, list-like container for holding repeated composite fields."""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_message_descriptor']
+
+ def __init__(self, message_listener: Any, message_descriptor: Any) -> None:
+ """
+    Note that we pass in a descriptor instead of the generated class directly,
+    since at the time we construct a RepeatedCompositeFieldContainer we
+    haven't yet necessarily initialized the type that will be contained in the
+    container.
+
+ Args:
+ message_listener: A MessageListener implementation.
+ The RepeatedCompositeFieldContainer will call this object's
+ Modified() method when it is modified.
+ message_descriptor: A Descriptor instance describing the protocol type
+ that should be present in this container. We'll use the
+ _concrete_class field of this descriptor when the client calls add().
+ """
+ super().__init__(message_listener)
+ self._message_descriptor = message_descriptor
+
+ def add(self, **kwargs: Any) -> _T:
+ """Adds a new element at the end of the list and returns it. Keyword
+ arguments may be used to initialize the element.
+ """
+ new_element = self._message_descriptor._concrete_class(**kwargs)
+ new_element._SetListener(self._message_listener)
+ self._values.append(new_element)
+ if not self._message_listener.dirty:
+ self._message_listener.Modified()
+ return new_element
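+
+  # Example via a message instance (a sketch; field and attribute names are
+  # hypothetical):
+  #   item = msg.repeated_msgs.add(id=1)  # constructs, appends and returns
+  #   item.name = 'first'                 # mutate the returned element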
+
+ def append(self, value: _T) -> None:
+ """Appends one element by copying the message."""
+ new_element = self._message_descriptor._concrete_class()
+ new_element._SetListener(self._message_listener)
+ new_element.CopyFrom(value)
+ self._values.append(new_element)
+ if not self._message_listener.dirty:
+ self._message_listener.Modified()
+
+ def insert(self, key: int, value: _T) -> None:
+ """Inserts the item at the specified position by copying."""
+ new_element = self._message_descriptor._concrete_class()
+ new_element._SetListener(self._message_listener)
+ new_element.CopyFrom(value)
+ self._values.insert(key, new_element)
+ if not self._message_listener.dirty:
+ self._message_listener.Modified()
+
+ def extend(self, elem_seq: Iterable[_T]) -> None:
+ """Extends by appending the given sequence of elements of the same type
+
+ as this one, copying each individual message.
+ """
+ message_class = self._message_descriptor._concrete_class
+ listener = self._message_listener
+ values = self._values
+ for message in elem_seq:
+ new_element = message_class()
+ new_element._SetListener(listener)
+ new_element.MergeFrom(message)
+ values.append(new_element)
+ listener.Modified()
+
+ def MergeFrom(
+ self,
+ other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]],
+ ) -> None:
+ """Appends the contents of another repeated field of the same type to this
+ one, copying each individual message.
+ """
+ self.extend(other)
+
+ def remove(self, elem: _T) -> None:
+ """Removes an item from the list. Similar to list.remove()."""
+ self._values.remove(elem)
+ self._message_listener.Modified()
+
+  def pop(self, key: int = -1) -> _T:
+ """Removes and returns an item at a given index. Similar to list.pop()."""
+ value = self._values[key]
+ self.__delitem__(key)
+ return value
+
+ @overload
+ def __setitem__(self, key: int, value: _T) -> None:
+ ...
+
+ @overload
+ def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
+ ...
+
+ def __setitem__(self, key, value):
+ # This method is implemented to make RepeatedCompositeFieldContainer
+ # structurally compatible with typing.MutableSequence. It is
+ # otherwise unsupported and will always raise an error.
+ raise TypeError(
+ f'{self.__class__.__name__} object does not support item assignment')
+
+ def __delitem__(self, key: Union[int, slice]) -> None:
+ """Deletes the item at the specified position."""
+ del self._values[key]
+ self._message_listener.Modified()
+
+ def __eq__(self, other: Any) -> bool:
+ """Compares the current instance with another one."""
+ if self is other:
+ return True
+ if not isinstance(other, self.__class__):
+ raise TypeError('Can only compare repeated composite fields against '
+ 'other repeated composite fields.')
+ return self._values == other._values
+
+
+class ScalarMap(MutableMapping[_K, _V]):
+ """Simple, type-checked, dict-like container for holding repeated scalars."""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener',
+ '_entry_descriptor']
+
+ def __init__(
+ self,
+ message_listener: Any,
+ key_checker: Any,
+ value_checker: Any,
+ entry_descriptor: Any,
+ ) -> None:
+ """
+ Args:
+ message_listener: A MessageListener implementation.
+ The ScalarMap will call this object's Modified() method when it
+ is modified.
+ key_checker: A type_checkers.ValueChecker instance to run on keys
+ inserted into this container.
+ value_checker: A type_checkers.ValueChecker instance to run on values
+ inserted into this container.
+ entry_descriptor: The MessageDescriptor of a map entry: key and value.
+ """
+ self._message_listener = message_listener
+ self._key_checker = key_checker
+ self._value_checker = value_checker
+ self._entry_descriptor = entry_descriptor
+ self._values = {}
+
+ def __getitem__(self, key: _K) -> _V:
+ try:
+ return self._values[key]
+ except KeyError:
+ key = self._key_checker.CheckValue(key)
+ val = self._value_checker.DefaultValue()
+ self._values[key] = val
+ return val
+
+ def __contains__(self, item: _K) -> bool:
+ # We check the key's type to match the strong-typing flavor of the API.
+ # Also this makes it easier to match the behavior of the C++ implementation.
+ self._key_checker.CheckValue(item)
+ return item in self._values
+
+ @overload
+ def get(self, key: _K) -> Optional[_V]:
+ ...
+
+ @overload
+ def get(self, key: _K, default: _T) -> Union[_V, _T]:
+ ...
+
+ # We need to override this explicitly, because our defaultdict-like behavior
+ # will make the default implementation (from our base class) always insert
+ # the key.
+ def get(self, key, default=None):
+ if key in self:
+ return self[key]
+ else:
+ return default
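+
+  # For illustration: unlike plain dict semantics, indexing a ScalarMap
+  # inserts the default value, so for a ScalarMap m with int32 values,
+  # m.get(5) returns None without mutating m, while m[5] inserts and
+  # returns 0.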
+
+  def __setitem__(self, key: _K, value: _V) -> None:
+ checked_key = self._key_checker.CheckValue(key)
+ checked_value = self._value_checker.CheckValue(value)
+ self._values[checked_key] = checked_value
+ self._message_listener.Modified()
+
+ def __delitem__(self, key: _K) -> None:
+ del self._values[key]
+ self._message_listener.Modified()
+
+ def __len__(self) -> int:
+ return len(self._values)
+
+ def __iter__(self) -> Iterator[_K]:
+ return iter(self._values)
+
+ def __repr__(self) -> str:
+ return repr(self._values)
+
+ def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None:
+ self._values.update(other._values)
+ self._message_listener.Modified()
+
+ def InvalidateIterators(self) -> None:
+ # It appears that the only way to reliably invalidate iterators to
+ # self._values is to ensure that its size changes.
+ original = self._values
+ self._values = original.copy()
+ original[None] = None
+
+ # This is defined in the abstract base, but we can do it much more cheaply.
+ def clear(self) -> None:
+ self._values.clear()
+ self._message_listener.Modified()
+
+ def GetEntryClass(self) -> Any:
+ return self._entry_descriptor._concrete_class
+
+
+class MessageMap(MutableMapping[_K, _V]):
+ """Simple, type-checked, dict-like container for with submessage values."""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_key_checker', '_values', '_message_listener',
+ '_message_descriptor', '_entry_descriptor']
+
+ def __init__(
+ self,
+ message_listener: Any,
+ message_descriptor: Any,
+ key_checker: Any,
+ entry_descriptor: Any,
+ ) -> None:
+ """
+ Args:
+      message_listener: A MessageListener implementation.
+        The MessageMap will call this object's Modified() method when it
+        is modified.
+      message_descriptor: A Descriptor instance describing the protocol type
+        of the messages held by this container; its _concrete_class is used
+        to instantiate values on first access.
+      key_checker: A type_checkers.ValueChecker instance to run on keys
+        inserted into this container.
+      entry_descriptor: The MessageDescriptor of a map entry: key and value.
+ """
+ self._message_listener = message_listener
+ self._message_descriptor = message_descriptor
+ self._key_checker = key_checker
+ self._entry_descriptor = entry_descriptor
+ self._values = {}
+
+ def __getitem__(self, key: _K) -> _V:
+ key = self._key_checker.CheckValue(key)
+ try:
+ return self._values[key]
+ except KeyError:
+ new_element = self._message_descriptor._concrete_class()
+ new_element._SetListener(self._message_listener)
+ self._values[key] = new_element
+ self._message_listener.Modified()
+ return new_element
+
+ def get_or_create(self, key: _K) -> _V:
+ """get_or_create() is an alias for getitem (ie. map[key]).
+
+ Args:
+ key: The key to get or create in the map.
+
+ This is useful in cases where you want to be explicit that the call is
+ mutating the map. This can avoid lint errors for statements like this
+ that otherwise would appear to be pointless statements:
+
+ msg.my_map[key]
+ """
+ return self[key]
+
+ @overload
+ def get(self, key: _K) -> Optional[_V]:
+ ...
+
+ @overload
+ def get(self, key: _K, default: _T) -> Union[_V, _T]:
+ ...
+
+ # We need to override this explicitly, because our defaultdict-like behavior
+ # will make the default implementation (from our base class) always insert
+ # the key.
+ def get(self, key, default=None):
+ if key in self:
+ return self[key]
+ else:
+ return default
+
+ def __contains__(self, item: _K) -> bool:
+ item = self._key_checker.CheckValue(item)
+ return item in self._values
+
+ def __setitem__(self, key: _K, value: _V) -> NoReturn:
+ raise ValueError('May not set values directly, call my_map[key].foo = 5')
+
+ def __delitem__(self, key: _K) -> None:
+ key = self._key_checker.CheckValue(key)
+ del self._values[key]
+ self._message_listener.Modified()
+
+ def __len__(self) -> int:
+ return len(self._values)
+
+ def __iter__(self) -> Iterator[_K]:
+ return iter(self._values)
+
+ def __repr__(self) -> str:
+ return repr(self._values)
+
+ def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None:
+ # pylint: disable=protected-access
+ for key in other._values:
+ # According to documentation: "When parsing from the wire or when merging,
+ # if there are duplicate map keys the last key seen is used".
+ if key in self:
+ del self[key]
+ self[key].CopyFrom(other[key])
+ # self._message_listener.Modified() not required here, because
+ # mutations to submessages already propagate.
+
+ def InvalidateIterators(self) -> None:
+ # It appears that the only way to reliably invalidate iterators to
+ # self._values is to ensure that its size changes.
+ original = self._values
+ self._values = original.copy()
+ original[None] = None
+
+ # This is defined in the abstract base, but we can do it much more cheaply.
+ def clear(self) -> None:
+ self._values.clear()
+ self._message_listener.Modified()
+
+ def GetEntryClass(self) -> Any:
+ return self._entry_descriptor._concrete_class
+
+
+class _UnknownField:
+ """A parsed unknown field."""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_field_number', '_wire_type', '_data']
+
+ def __init__(self, field_number, wire_type, data):
+ self._field_number = field_number
+ self._wire_type = wire_type
+ self._data = data
+
+ def __lt__(self, other):
+ # pylint: disable=protected-access
+ return self._field_number < other._field_number
+
+ def __eq__(self, other):
+ if self is other:
+ return True
+ # pylint: disable=protected-access
+ return (self._field_number == other._field_number and
+ self._wire_type == other._wire_type and
+ self._data == other._data)
+
+
+class UnknownFieldRef: # pylint: disable=missing-class-docstring
+
+ def __init__(self, parent, index):
+ self._parent = parent
+ self._index = index
+
+ def _check_valid(self):
+ if not self._parent:
+ raise ValueError('UnknownField does not exist. '
+ 'The parent message might be cleared.')
+ if self._index >= len(self._parent):
+ raise ValueError('UnknownField does not exist. '
+ 'The parent message might be cleared.')
+
+ @property
+ def field_number(self):
+ self._check_valid()
+ # pylint: disable=protected-access
+ return self._parent._internal_get(self._index)._field_number
+
+ @property
+ def wire_type(self):
+ self._check_valid()
+ # pylint: disable=protected-access
+ return self._parent._internal_get(self._index)._wire_type
+
+ @property
+ def data(self):
+ self._check_valid()
+ # pylint: disable=protected-access
+ return self._parent._internal_get(self._index)._data
+
+
+class UnknownFieldSet:
+ """UnknownField container"""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_values']
+
+ def __init__(self):
+ self._values = []
+
+ def __getitem__(self, index):
+ if self._values is None:
+ raise ValueError('UnknownFields does not exist. '
+ 'The parent message might be cleared.')
+ size = len(self._values)
+ if index < 0:
+ index += size
+ if index < 0 or index >= size:
+      raise IndexError('index %d out of range' % index)
+
+ return UnknownFieldRef(self, index)
+
+ def _internal_get(self, index):
+ return self._values[index]
+
+ def __len__(self):
+ if self._values is None:
+ raise ValueError('UnknownFields does not exist. '
+ 'The parent message might be cleared.')
+ return len(self._values)
+
+ def _add(self, field_number, wire_type, data):
+ unknown_field = _UnknownField(field_number, wire_type, data)
+ self._values.append(unknown_field)
+ return unknown_field
+
+ def __iter__(self):
+ for i in range(len(self)):
+ yield UnknownFieldRef(self, i)
+
+ def _extend(self, other):
+ if other is None:
+ return
+ # pylint: disable=protected-access
+ self._values.extend(other._values)
+
+ def __eq__(self, other):
+ if self is other:
+ return True
+ # Sort unknown fields because their order shouldn't
+ # affect equality test.
+ values = list(self._values)
+ if other is None:
+ return not values
+ values.sort()
+ # pylint: disable=protected-access
+ other_values = sorted(other._values)
+ return values == other_values
+
+ def _clear(self):
+ for value in self._values:
+ # pylint: disable=protected-access
+ if isinstance(value._data, UnknownFieldSet):
+ value._data._clear() # pylint: disable=protected-access
+ self._values = None
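+
+
+# For illustration: an UnknownFieldSet populated via _add(), e.g.
+#   ufs = UnknownFieldSet()
+#   ufs._add(1, 0, 150)
+# exposes UnknownFieldRef views, so ufs[0].field_number == 1,
+# ufs[0].wire_type == 0 and ufs[0].data == 150.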
diff --git a/Lib/site-packages/google/protobuf/internal/decoder.py b/Lib/site-packages/google/protobuf/internal/decoder.py
new file mode 100644
index 0000000..acb91aa
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/decoder.py
@@ -0,0 +1,1044 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Code for decoding protocol buffer primitives.
+
+This code is very similar to encoder.py -- read the docs for that module first.
+
+A "decoder" is a function with the signature:
+ Decode(buffer, pos, end, message, field_dict)
+The arguments are:
+ buffer: The string containing the encoded message.
+ pos: The current position in the string.
+ end: The position in the string where the current message ends. May be
+ less than len(buffer) if we're reading a sub-message.
+ message: The message object into which we're parsing.
+ field_dict: message._fields (avoids a hashtable lookup).
+The decoder reads the field and stores it into field_dict, returning the new
+buffer position. A decoder for a repeated field may proactively decode all of
+the elements of that field, if they appear consecutively.
+
+Note that decoders may throw any of the following:
+ IndexError: Indicates a truncated message.
+ struct.error: Unpacking of a fixed-width field failed.
+ message.DecodeError: Other errors.
+
+Decoders are expected to raise an exception if they are called with pos > end.
+This allows callers to be lax about bounds checking: it's fine to read past
+"end" as long as you are sure that someone else will notice and throw an
+exception later on.
+
+Something up the call stack is expected to catch IndexError and struct.error
+and convert them to message.DecodeError.
+
+Decoders are constructed using decoder constructors with the signature:
+ MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
+The arguments are:
+ field_number: The field number of the field we want to decode.
+ is_repeated: Is the field a repeated field? (bool)
+ is_packed: Is the field a packed field? (bool)
+ key: The key to use when looking up the field within field_dict.
+ (This is actually the FieldDescriptor but nothing in this
+ file should depend on that.)
+ new_default: A function which takes a message object as a parameter and
+ returns a new instance of the default value for this field.
+ (This is called for repeated fields and sub-messages, when an
+ instance does not already exist.)
+
+As with encoders, we define a decoder constructor for every type of field.
+Then, for every field of every message class we construct an actual decoder.
+That decoder goes into a dict indexed by tag, so when we decode a message
+we repeatedly read a tag, look up the corresponding decoder, and invoke it.
+"""
+
+__author__ = 'kenton@google.com (Kenton Varda)'
+
+import math
+import struct
+
+from google.protobuf.internal import containers
+from google.protobuf.internal import encoder
+from google.protobuf.internal import wire_format
+from google.protobuf import message
+
+
+# This is not for optimization, but rather to avoid conflicts with local
+# variables named "message".
+_DecodeError = message.DecodeError
+
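+
+# A minimal sketch (illustrative only, not part of this module's API) of the
+# decoder contract documented above: read one fixed-width value at pos, store
+# it under a hypothetical field key, and return the new position.
+def _ExampleDecodeFixed32(buffer, pos, end, message, field_dict):
+  new_pos = pos + 4
+  if new_pos > end:
+    raise _DecodeError('Truncated message.')
+  field_dict['example_key'] = struct.unpack('<I', buffer[pos:new_pos])[0]
+  return new_pos
+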
+
+def _VarintDecoder(mask, result_type):
+ """Return an encoder for a basic varint value (does not include tag).
+
+ Decoded values will be bitwise-anded with the given mask before being
+ returned, e.g. to limit them to 32 bits. The returned decoder does not
+ take the usual "end" parameter -- the caller is expected to do bounds checking
+ after the fact (often the caller can defer such checking until later). The
+ decoder returns a (value, new_pos) pair.
+ """
+
+ def DecodeVarint(buffer, pos):
+ result = 0
+ shift = 0
+ while 1:
+ b = buffer[pos]
+ result |= ((b & 0x7f) << shift)
+ pos += 1
+ if not (b & 0x80):
+ result &= mask
+ result = result_type(result)
+ return (result, pos)
+ shift += 7
+ if shift >= 64:
+ raise _DecodeError('Too many bytes when decoding varint.')
+ return DecodeVarint
+
+
+def _SignedVarintDecoder(bits, result_type):
+ """Like _VarintDecoder() but decodes signed values."""
+
+ signbit = 1 << (bits - 1)
+ mask = (1 << bits) - 1
+
+ def DecodeVarint(buffer, pos):
+ result = 0
+ shift = 0
+ while 1:
+ b = buffer[pos]
+ result |= ((b & 0x7f) << shift)
+ pos += 1
+ if not (b & 0x80):
+ result &= mask
+ result = (result ^ signbit) - signbit
+ result = result_type(result)
+ return (result, pos)
+ shift += 7
+ if shift >= 64:
+ raise _DecodeError('Too many bytes when decoding varint.')
+ return DecodeVarint
+
+# All 32-bit and 64-bit values are represented as int.
+_DecodeVarint = _VarintDecoder((1 << 64) - 1, int)
+_DecodeSignedVarint = _SignedVarintDecoder(64, int)
+
+# Use these versions for values which must be limited to 32 bits.
+_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
+_DecodeSignedVarint32 = _SignedVarintDecoder(32, int)
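+
+# For illustration: varints store seven bits per byte, least-significant group
+# first, with the high bit of each byte as a continuation flag. For example,
+# b'\xac\x02' decodes as (0xac & 0x7f) | (0x02 << 7) == 300, so
+# _DecodeVarint(b'\xac\x02', 0) returns (300, 2).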
+
+
+def ReadTag(buffer, pos):
+ """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple.
+
+ We return the raw bytes of the tag rather than decoding them. The raw
+ bytes can then be used to look up the proper decoder. This effectively allows
+ us to trade some work that would be done in pure-python (decoding a varint)
+ for work that is done in C (searching for a byte string in a hash table).
+ In a low-level language it would be much cheaper to decode the varint and
+ use that, but not in Python.
+
+ Args:
+ buffer: memoryview object of the encoded bytes
+ pos: int of the current position to start from
+
+ Returns:
+ Tuple[bytes, int] of the tag data and new position.
+ """
+ start = pos
+ while buffer[pos] & 0x80:
+ pos += 1
+ pos += 1
+
+ tag_bytes = buffer[start:pos].tobytes()
+ return tag_bytes, pos
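+
+# For illustration: a tag is the varint (field_number << 3) | wire_type, so
+# field 1 with wire type 0 (varint) is the single byte b'\x08', and
+# ReadTag(memoryview(b'\x08\x96\x01'), 0) returns (b'\x08', 1).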
+
+
+# --------------------------------------------------------------------
+
+
+def _SimpleDecoder(wire_type, decode_value):
+ """Return a constructor for a decoder for fields of a particular type.
+
+ Args:
+ wire_type: The field's wire type.
+ decode_value: A function which decodes an individual value, e.g.
+ _DecodeVarint()
+ """
+
+ def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default,
+ clear_if_default=False):
+ if is_packed:
+ local_DecodeVarint = _DecodeVarint
+ def DecodePackedField(buffer, pos, end, message, field_dict):
+ value = field_dict.get(key)
+ if value is None:
+ value = field_dict.setdefault(key, new_default(message))
+ (endpoint, pos) = local_DecodeVarint(buffer, pos)
+ endpoint += pos
+ if endpoint > end:
+ raise _DecodeError('Truncated message.')
+ while pos < endpoint:
+ (element, pos) = decode_value(buffer, pos)
+ value.append(element)
+ if pos > endpoint:
+ del value[-1] # Discard corrupt value.
+ raise _DecodeError('Packed element was truncated.')
+ return pos
+ return DecodePackedField
+ elif is_repeated:
+ tag_bytes = encoder.TagBytes(field_number, wire_type)
+ tag_len = len(tag_bytes)
+ def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+ value = field_dict.get(key)
+ if value is None:
+ value = field_dict.setdefault(key, new_default(message))
+ while 1:
+ (element, new_pos) = decode_value(buffer, pos)
+ value.append(element)
+ # Predict that the next tag is another copy of the same repeated
+ # field.
+ pos = new_pos + tag_len
+ if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
+ # Prediction failed. Return.
+ if new_pos > end:
+ raise _DecodeError('Truncated message.')
+ return new_pos
+ return DecodeRepeatedField
+ else:
+ def DecodeField(buffer, pos, end, message, field_dict):
+ (new_value, pos) = decode_value(buffer, pos)
+ if pos > end:
+ raise _DecodeError('Truncated message.')
+ if clear_if_default and not new_value:
+ field_dict.pop(key, None)
+ else:
+ field_dict[key] = new_value
+ return pos
+ return DecodeField
+
+ return SpecificDecoder
+
+
+def _ModifiedDecoder(wire_type, decode_value, modify_value):
+ """Like SimpleDecoder but additionally invokes modify_value on every value
+ before storing it. Usually modify_value is ZigZagDecode.
+ """
+
+ # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
+ # not enough to make a significant difference.
+
+ def InnerDecode(buffer, pos):
+ (result, new_pos) = decode_value(buffer, pos)
+ return (modify_value(result), new_pos)
+ return _SimpleDecoder(wire_type, InnerDecode)
+
+
+def _StructPackDecoder(wire_type, format):
+ """Return a constructor for a decoder for a fixed-width field.
+
+ Args:
+ wire_type: The field's wire type.
+ format: The format string to pass to struct.unpack().
+ """
+
+ value_size = struct.calcsize(format)
+ local_unpack = struct.unpack
+
+ # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
+ # not enough to make a significant difference.
+
+ # Note that we expect someone up-stack to catch struct.error and convert
+ # it to _DecodeError -- this way we don't have to set up exception-
+ # handling blocks every time we parse one value.
+
+ def InnerDecode(buffer, pos):
+ new_pos = pos + value_size
+ result = local_unpack(format, buffer[pos:new_pos])[0]
+ return (result, new_pos)
+ return _SimpleDecoder(wire_type, InnerDecode)
+
+
+def _FloatDecoder():
+ """Returns a decoder for a float field.
+
+ This code works around a bug in struct.unpack for non-finite 32-bit
+ floating-point values.
+ """
+
+ local_unpack = struct.unpack
+
+ def InnerDecode(buffer, pos):
+ """Decode serialized float to a float and new position.
+
+ Args:
+ buffer: memoryview of the serialized bytes
+ pos: int, position in the memory view to start at.
+
+ Returns:
+ Tuple[float, int] of the deserialized float value and new position
+ in the serialized data.
+ """
+ # We expect a 32-bit value in little-endian byte order. Bit 1 is the sign
+ # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
+ new_pos = pos + 4
+ float_bytes = buffer[pos:new_pos].tobytes()
+
+ # If this value has all its exponent bits set, then it's non-finite.
+ # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
+ # To avoid that, we parse it specially.
+ if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'):
+ # If at least one significand bit is set...
+ if float_bytes[0:3] != b'\x00\x00\x80':
+ return (math.nan, new_pos)
+ # If sign bit is set...
+ if float_bytes[3:4] == b'\xFF':
+ return (-math.inf, new_pos)
+ return (math.inf, new_pos)
+
+ # Note that we expect someone up-stack to catch struct.error and convert
+ # it to _DecodeError -- this way we don't have to set up exception-
+ # handling blocks every time we parse one value.
+    result = local_unpack('<f', float_bytes)[0]
+    return (result, new_pos)
+  return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
+
+
+def _DoubleDecoder():
+  """Returns a decoder for a double field.
+
+  This code works around a bug in struct.unpack for not-a-number.
+  """
+
+  local_unpack = struct.unpack
+
+  def InnerDecode(buffer, pos):
+    """Decode serialized double to a double and new position.
+
+    Args:
+      buffer: memoryview of the serialized bytes.
+      pos: int, position in the memory view to start at.
+
+    Returns:
+      Tuple[float, int] of the decoded double value and new position
+      in the serialized data.
+    """
+    # We expect a 64-bit value in little-endian byte order. Bit 1 is the sign
+    # bit, bits 2-12 represent the exponent, and bits 13-64 are the
+    # significand.
+    new_pos = pos + 8
+    double_bytes = buffer[pos:new_pos].tobytes()
+
+    # If this value has all its exponent bits set and at least one significand
+    # bit set, it's not a number. To keep struct.unpack from converting it to
+    # a finite value, we parse it specially.
+    if ((double_bytes[7:8] in b'\x7F\xFF')
+        and (double_bytes[6:7] >= b'\xF0')
+ and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
+ return (math.nan, new_pos)
+
+ # Note that we expect someone up-stack to catch struct.error and convert
+ # it to _DecodeError -- this way we don't have to set up exception-
+ # handling blocks every time we parse one value.
+    result = local_unpack('<d', double_bytes)[0]
+    return (result, new_pos)
+  return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
+
+
+def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
+                clear_if_default=False):
+  """Returns a decoder for an enum field."""
+  enum_type = key.enum_type
+
+  if is_packed:
+    local_DecodeVarint = _DecodeVarint
+    def DecodePackedField(buffer, pos, end, message, field_dict):
+      """Decode serialized packed enum to its value and a new position.
+
+      Args:
+        buffer: memoryview of the serialized bytes.
+        pos: int, position in the memory view to start at.
+        end: int, end position of serialized data
+        message: Message object to store unknown fields in
+        field_dict: Map[Descriptor, Any] to store decoded values in.
+
+      Returns:
+        int, new position in serialized data.
+      """
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      (endpoint, pos) = local_DecodeVarint(buffer, pos)
+      endpoint += pos
+      if endpoint > end:
+ raise _DecodeError('Truncated message.')
+ while pos < endpoint:
+ value_start_pos = pos
+ (element, pos) = _DecodeSignedVarint32(buffer, pos)
+ # pylint: disable=protected-access
+ if element in enum_type.values_by_number:
+ value.append(element)
+ else:
+ if not message._unknown_fields:
+ message._unknown_fields = []
+ tag_bytes = encoder.TagBytes(field_number,
+ wire_format.WIRETYPE_VARINT)
+
+ message._unknown_fields.append(
+ (tag_bytes, buffer[value_start_pos:pos].tobytes()))
+ if message._unknown_field_set is None:
+ message._unknown_field_set = containers.UnknownFieldSet()
+ message._unknown_field_set._add(
+ field_number, wire_format.WIRETYPE_VARINT, element)
+ # pylint: enable=protected-access
+ if pos > endpoint:
+ if element in enum_type.values_by_number:
+ del value[-1] # Discard corrupt value.
+ else:
+ del message._unknown_fields[-1]
+ # pylint: disable=protected-access
+ del message._unknown_field_set._values[-1]
+ # pylint: enable=protected-access
+ raise _DecodeError('Packed element was truncated.')
+ return pos
+ return DecodePackedField
+ elif is_repeated:
+ tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
+ tag_len = len(tag_bytes)
+ def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+ """Decode serialized repeated enum to its value and a new position.
+
+ Args:
+ buffer: memoryview of the serialized bytes.
+ pos: int, position in the memory view to start at.
+ end: int, end position of serialized data
+ message: Message object to store unknown fields in
+ field_dict: Map[Descriptor, Any] to store decoded values in.
+
+ Returns:
+ int, new position in serialized data.
+ """
+ value = field_dict.get(key)
+ if value is None:
+ value = field_dict.setdefault(key, new_default(message))
+ while 1:
+ (element, new_pos) = _DecodeSignedVarint32(buffer, pos)
+ # pylint: disable=protected-access
+ if element in enum_type.values_by_number:
+ value.append(element)
+ else:
+ if not message._unknown_fields:
+ message._unknown_fields = []
+ message._unknown_fields.append(
+ (tag_bytes, buffer[pos:new_pos].tobytes()))
+ if message._unknown_field_set is None:
+ message._unknown_field_set = containers.UnknownFieldSet()
+ message._unknown_field_set._add(
+ field_number, wire_format.WIRETYPE_VARINT, element)
+ # pylint: enable=protected-access
+ # Predict that the next tag is another copy of the same repeated
+ # field.
+ pos = new_pos + tag_len
+ if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
+ # Prediction failed. Return.
+ if new_pos > end:
+ raise _DecodeError('Truncated message.')
+ return new_pos
+ return DecodeRepeatedField
+ else:
+ def DecodeField(buffer, pos, end, message, field_dict):
+ """Decode serialized repeated enum to its value and a new position.
+
+ Args:
+ buffer: memoryview of the serialized bytes.
+ pos: int, position in the memory view to start at.
+ end: int, end position of serialized data
+ message: Message object to store unknown fields in
+ field_dict: Map[Descriptor, Any] to store decoded values in.
+
+ Returns:
+ int, new position in serialized data.
+ """
+ value_start_pos = pos
+ (enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
+ if pos > end:
+ raise _DecodeError('Truncated message.')
+ if clear_if_default and not enum_value:
+ field_dict.pop(key, None)
+ return pos
+ # pylint: disable=protected-access
+ if enum_value in enum_type.values_by_number:
+ field_dict[key] = enum_value
+ else:
+ if not message._unknown_fields:
+ message._unknown_fields = []
+ tag_bytes = encoder.TagBytes(field_number,
+ wire_format.WIRETYPE_VARINT)
+ message._unknown_fields.append(
+ (tag_bytes, buffer[value_start_pos:pos].tobytes()))
+ if message._unknown_field_set is None:
+ message._unknown_field_set = containers.UnknownFieldSet()
+ message._unknown_field_set._add(
+ field_number, wire_format.WIRETYPE_VARINT, enum_value)
+ # pylint: enable=protected-access
+ return pos
+ return DecodeField
+
+
+# --------------------------------------------------------------------
+
+
+Int32Decoder = _SimpleDecoder(
+ wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)
+
+Int64Decoder = _SimpleDecoder(
+ wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)
+
+UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
+UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)
+
+SInt32Decoder = _ModifiedDecoder(
+ wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
+SInt64Decoder = _ModifiedDecoder(
+ wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)
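+
+# For illustration: ZigZagDecode maps the unsigned wire value back to signed,
+# 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, so an sint32 field holding -1 arrives as
+# the single varint byte b'\x01'.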
+
+# Note that Python conveniently guarantees that when using the '<' prefix on
+# formats, they will also have the same size across all platforms (as opposed
+# to without the prefix, where their sizes depend on the C compiler's basic
+# type sizes).
+Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
+Fixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
+SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
+SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
+FloatDecoder = _FloatDecoder()
+DoubleDecoder = _DoubleDecoder()
+
+BoolDecoder = _ModifiedDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)
+
+
+def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
+                  clear_if_default=False):
+  """Returns a decoder for a string field."""
+
+  local_DecodeVarint = _DecodeVarint
+
+  def _ConvertToUnicode(memview):
+    """Convert bytes to unicode."""
+    byte_str = memview.tobytes()
+    try:
+      value = str(byte_str, 'utf-8')
+    except UnicodeDecodeError as e:
+      # Add more information to the error message and re-raise it.
+      e.reason = '%s in field: %s' % (e, key.full_name)
+      raise
+
+    return value
+
+  assert not is_packed
+  if is_repeated:
+    tag_bytes = encoder.TagBytes(field_number,
+                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
+    tag_len = len(tag_bytes)
+    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      while 1:
+        (size, pos) = local_DecodeVarint(buffer, pos)
+        new_pos = pos + size
+        if new_pos > end:
+ raise _DecodeError('Truncated string.')
+ value.append(_ConvertToUnicode(buffer[pos:new_pos]))
+ # Predict that the next tag is another copy of the same repeated field.
+ pos = new_pos + tag_len
+ if buffer[new_pos:pos] != tag_bytes or new_pos == end:
+ # Prediction failed. Return.
+ return new_pos
+ return DecodeRepeatedField
+ else:
+ def DecodeField(buffer, pos, end, message, field_dict):
+ (size, pos) = local_DecodeVarint(buffer, pos)
+ new_pos = pos + size
+ if new_pos > end:
+ raise _DecodeError('Truncated string.')
+ if clear_if_default and not size:
+ field_dict.pop(key, None)
+ else:
+ field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos])
+ return new_pos
+ return DecodeField
+
+
+def BytesDecoder(field_number, is_repeated, is_packed, key, new_default,
+ clear_if_default=False):
+ """Returns a decoder for a bytes field."""
+
+ local_DecodeVarint = _DecodeVarint
+
+ assert not is_packed
+ if is_repeated:
+ tag_bytes = encoder.TagBytes(field_number,
+ wire_format.WIRETYPE_LENGTH_DELIMITED)
+ tag_len = len(tag_bytes)
+ def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+ value = field_dict.get(key)
+ if value is None:
+ value = field_dict.setdefault(key, new_default(message))
+ while 1:
+ (size, pos) = local_DecodeVarint(buffer, pos)
+ new_pos = pos + size
+ if new_pos > end:
+ raise _DecodeError('Truncated string.')
+ value.append(buffer[pos:new_pos].tobytes())
+ # Predict that the next tag is another copy of the same repeated field.
+ pos = new_pos + tag_len
+ if buffer[new_pos:pos] != tag_bytes or new_pos == end:
+ # Prediction failed. Return.
+ return new_pos
+ return DecodeRepeatedField
+ else:
+ def DecodeField(buffer, pos, end, message, field_dict):
+ (size, pos) = local_DecodeVarint(buffer, pos)
+ new_pos = pos + size
+ if new_pos > end:
+ raise _DecodeError('Truncated string.')
+ if clear_if_default and not size:
+ field_dict.pop(key, None)
+ else:
+ field_dict[key] = buffer[pos:new_pos].tobytes()
+ return new_pos
+ return DecodeField
+
+
+def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
+ """Returns a decoder for a group field."""
+
+ end_tag_bytes = encoder.TagBytes(field_number,
+ wire_format.WIRETYPE_END_GROUP)
+ end_tag_len = len(end_tag_bytes)
+
+ assert not is_packed
+ if is_repeated:
+ tag_bytes = encoder.TagBytes(field_number,
+ wire_format.WIRETYPE_START_GROUP)
+ tag_len = len(tag_bytes)
+ def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+ value = field_dict.get(key)
+ if value is None:
+ value = field_dict.setdefault(key, new_default(message))
+ while 1:
+ value = field_dict.get(key)
+ if value is None:
+ value = field_dict.setdefault(key, new_default(message))
+ # Read sub-message.
+ pos = value.add()._InternalParse(buffer, pos, end)
+ # Read end tag.
+ new_pos = pos+end_tag_len
+ if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
+ raise _DecodeError('Missing group end tag.')
+ # Predict that the next tag is another copy of the same repeated field.
+ pos = new_pos + tag_len
+ if buffer[new_pos:pos] != tag_bytes or new_pos == end:
+ # Prediction failed. Return.
+ return new_pos
+ return DecodeRepeatedField
+ else:
+ def DecodeField(buffer, pos, end, message, field_dict):
+ value = field_dict.get(key)
+ if value is None:
+ value = field_dict.setdefault(key, new_default(message))
+ # Read sub-message.
+ pos = value._InternalParse(buffer, pos, end)
+ # Read end tag.
+ new_pos = pos+end_tag_len
+ if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
+ raise _DecodeError('Missing group end tag.')
+ return new_pos
+ return DecodeField
+
+
+def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
+ """Returns a decoder for a message field."""
+
+ local_DecodeVarint = _DecodeVarint
+
+ assert not is_packed
+ if is_repeated:
+ tag_bytes = encoder.TagBytes(field_number,
+ wire_format.WIRETYPE_LENGTH_DELIMITED)
+ tag_len = len(tag_bytes)
+ def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+ value = field_dict.get(key)
+ if value is None:
+ value = field_dict.setdefault(key, new_default(message))
+ while 1:
+ # Read length.
+ (size, pos) = local_DecodeVarint(buffer, pos)
+ new_pos = pos + size
+ if new_pos > end:
+ raise _DecodeError('Truncated message.')
+ # Read sub-message.
+ if value.add()._InternalParse(buffer, pos, new_pos) != new_pos:
+ # The only reason _InternalParse would return early is if it
+ # encountered an end-group tag.
+ raise _DecodeError('Unexpected end-group tag.')
+ # Predict that the next tag is another copy of the same repeated field.
+ pos = new_pos + tag_len
+ if buffer[new_pos:pos] != tag_bytes or new_pos == end:
+ # Prediction failed. Return.
+ return new_pos
+ return DecodeRepeatedField
+ else:
+ def DecodeField(buffer, pos, end, message, field_dict):
+ value = field_dict.get(key)
+ if value is None:
+ value = field_dict.setdefault(key, new_default(message))
+ # Read length.
+ (size, pos) = local_DecodeVarint(buffer, pos)
+ new_pos = pos + size
+ if new_pos > end:
+ raise _DecodeError('Truncated message.')
+ # Read sub-message.
+ if value._InternalParse(buffer, pos, new_pos) != new_pos:
+ # The only reason _InternalParse would return early is if it encountered
+ # an end-group tag.
+ raise _DecodeError('Unexpected end-group tag.')
+ return new_pos
+ return DecodeField
+
+
+# --------------------------------------------------------------------
+
+MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP)
+
+def MessageSetItemDecoder(descriptor):
+ """Returns a decoder for a MessageSet item.
+
+ The parameter is the message Descriptor.
+
+ The message set message looks like this:
+ message MessageSet {
+ repeated group Item = 1 {
+ required int32 type_id = 2;
+ required string message = 3;
+ }
+ }
+ """
+
+ type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
+ message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
+ item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)
+
+ local_ReadTag = ReadTag
+ local_DecodeVarint = _DecodeVarint
+ local_SkipField = SkipField
+
+ def DecodeItem(buffer, pos, end, message, field_dict):
+ """Decode serialized message set to its value and new position.
+
+ Args:
+ buffer: memoryview of the serialized bytes.
+ pos: int, position in the memory view to start at.
+ end: int, end position of serialized data
+ message: Message object to store unknown fields in
+ field_dict: Map[Descriptor, Any] to store decoded values in.
+
+ Returns:
+ int, new position in serialized data.
+ """
+ message_set_item_start = pos
+ type_id = -1
+ message_start = -1
+ message_end = -1
+
+ # Technically, type_id and message can appear in any order, so we need
+ # a little loop here.
+ while 1:
+ (tag_bytes, pos) = local_ReadTag(buffer, pos)
+ if tag_bytes == type_id_tag_bytes:
+ (type_id, pos) = local_DecodeVarint(buffer, pos)
+ elif tag_bytes == message_tag_bytes:
+ (size, message_start) = local_DecodeVarint(buffer, pos)
+ pos = message_end = message_start + size
+ elif tag_bytes == item_end_tag_bytes:
+ break
+ else:
+ pos = SkipField(buffer, pos, end, tag_bytes)
+ if pos == -1:
+ raise _DecodeError('Missing group end tag.')
+
+ if pos > end:
+ raise _DecodeError('Truncated message.')
+
+ if type_id == -1:
+ raise _DecodeError('MessageSet item missing type_id.')
+ if message_start == -1:
+ raise _DecodeError('MessageSet item missing message.')
+
+ extension = message.Extensions._FindExtensionByNumber(type_id)
+ # pylint: disable=protected-access
+ if extension is not None:
+ value = field_dict.get(extension)
+ if value is None:
+ message_type = extension.message_type
+ if not hasattr(message_type, '_concrete_class'):
+          # pylint: disable=g-import-not-at-top
+          from google.protobuf import message_factory
+          message_factory.GetMessageClass(message_type)
+ value = field_dict.setdefault(
+ extension, message_type._concrete_class())
+      if value._InternalParse(buffer, message_start, message_end) != message_end:
+ # The only reason _InternalParse would return early is if it encountered
+ # an end-group tag.
+ raise _DecodeError('Unexpected end-group tag.')
+ else:
+ if not message._unknown_fields:
+ message._unknown_fields = []
+ message._unknown_fields.append(
+ (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes()))
+ if message._unknown_field_set is None:
+ message._unknown_field_set = containers.UnknownFieldSet()
+ message._unknown_field_set._add(
+ type_id,
+ wire_format.WIRETYPE_LENGTH_DELIMITED,
+ buffer[message_start:message_end].tobytes())
+ # pylint: enable=protected-access
+
+ return pos
+
+ return DecodeItem
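+
+# For illustration: on the wire a MessageSet item is a group -- a start tag
+# for field 1 (b'\x0b'), a varint type_id on field 2 (tag byte b'\x10'), the
+# serialized payload as bytes on field 3 (tag byte b'\x1a'), and the matching
+# end tag (b'\x0c').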
+
+
+def UnknownMessageSetItemDecoder():
+ """Returns a decoder for a Unknown MessageSet item."""
+
+ type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
+ message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
+ item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)
+
+ def DecodeUnknownItem(buffer):
+ pos = 0
+ end = len(buffer)
+    type_id = -1
+    message_start = -1
+    message_end = -1
+ while 1:
+ (tag_bytes, pos) = ReadTag(buffer, pos)
+ if tag_bytes == type_id_tag_bytes:
+ (type_id, pos) = _DecodeVarint(buffer, pos)
+ elif tag_bytes == message_tag_bytes:
+ (size, message_start) = _DecodeVarint(buffer, pos)
+ pos = message_end = message_start + size
+ elif tag_bytes == item_end_tag_bytes:
+ break
+ else:
+ pos = SkipField(buffer, pos, end, tag_bytes)
+ if pos == -1:
+ raise _DecodeError('Missing group end tag.')
+
+ if pos > end:
+ raise _DecodeError('Truncated message.')
+
+ if type_id == -1:
+ raise _DecodeError('MessageSet item missing type_id.')
+ if message_start == -1:
+ raise _DecodeError('MessageSet item missing message.')
+
+ return (type_id, buffer[message_start:message_end].tobytes())
+
+ return DecodeUnknownItem
+
+# --------------------------------------------------------------------
+
+def MapDecoder(field_descriptor, new_default, is_message_map):
+ """Returns a decoder for a map field."""
+
+ key = field_descriptor
+ tag_bytes = encoder.TagBytes(field_descriptor.number,
+ wire_format.WIRETYPE_LENGTH_DELIMITED)
+ tag_len = len(tag_bytes)
+ local_DecodeVarint = _DecodeVarint
+ # Can't read _concrete_class yet; might not be initialized.
+ message_type = field_descriptor.message_type
+
+ def DecodeMap(buffer, pos, end, message, field_dict):
+ submsg = message_type._concrete_class()
+ value = field_dict.get(key)
+ if value is None:
+ value = field_dict.setdefault(key, new_default(message))
+ while 1:
+ # Read length.
+ (size, pos) = local_DecodeVarint(buffer, pos)
+ new_pos = pos + size
+ if new_pos > end:
+ raise _DecodeError('Truncated message.')
+ # Read sub-message.
+ submsg.Clear()
+ if submsg._InternalParse(buffer, pos, new_pos) != new_pos:
+ # The only reason _InternalParse would return early is if it
+ # encountered an end-group tag.
+ raise _DecodeError('Unexpected end-group tag.')
+
+ if is_message_map:
+ value[submsg.key].CopyFrom(submsg.value)
+ else:
+ value[submsg.key] = submsg.value
+
+ # Predict that the next tag is another copy of the same repeated field.
+ pos = new_pos + tag_len
+ if buffer[new_pos:pos] != tag_bytes or new_pos == end:
+ # Prediction failed. Return.
+ return new_pos
+
+ return DecodeMap
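+
+# For illustration: each map item is an ordinary length-delimited entry
+# message with fields key (1) and value (2). A map<int32, int32> entry
+# {key: 1, value: 2} is the payload b'\x08\x01\x10\x02' preceded by its
+# length varint b'\x04'.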
+
+# --------------------------------------------------------------------
+# Optimization is not as heavy here because calls to SkipField() are rare,
+# except for handling end-group tags.
+
+def _SkipVarint(buffer, pos, end):
+ """Skip a varint value. Returns the new position."""
+ # Previously ord(buffer[pos]) raised IndexError when pos is out of range.
+ # With this code, ord(b'') raises TypeError. Both are handled in
+ # python_message.py to generate a 'Truncated message' error.
+ while ord(buffer[pos:pos+1].tobytes()) & 0x80:
+ pos += 1
+ pos += 1
+ if pos > end:
+ raise _DecodeError('Truncated message.')
+ return pos
+
+def _SkipFixed64(buffer, pos, end):
+ """Skip a fixed64 value. Returns the new position."""
+
+ pos += 8
+ if pos > end:
+ raise _DecodeError('Truncated message.')
+ return pos
+
+
+def _DecodeFixed64(buffer, pos):
+ """Decode a fixed64."""
+ new_pos = pos + 8
+  return (struct.unpack('<Q', buffer[pos:new_pos])[0], new_pos)
+
+
+def _SkipLengthDelimited(buffer, pos, end):
+  """Skip a length-delimited value.  Returns the new position."""
+
+  (size, pos) = _DecodeVarint(buffer, pos)
+  pos += size
+  if pos > end:
+ raise _DecodeError('Truncated message.')
+ return pos
+
+
+def _SkipGroup(buffer, pos, end):
+ """Skip sub-group. Returns the new position."""
+
+ while 1:
+ (tag_bytes, pos) = ReadTag(buffer, pos)
+ new_pos = SkipField(buffer, pos, end, tag_bytes)
+ if new_pos == -1:
+ return pos
+ pos = new_pos
+
+
+def _DecodeUnknownFieldSet(buffer, pos, end_pos=None):
+ """Decode UnknownFieldSet. Returns the UnknownFieldSet and new position."""
+
+ unknown_field_set = containers.UnknownFieldSet()
+ while end_pos is None or pos < end_pos:
+ (tag_bytes, pos) = ReadTag(buffer, pos)
+ (tag, _) = _DecodeVarint(tag_bytes, 0)
+ field_number, wire_type = wire_format.UnpackTag(tag)
+ if wire_type == wire_format.WIRETYPE_END_GROUP:
+ break
+ (data, pos) = _DecodeUnknownField(buffer, pos, wire_type)
+ # pylint: disable=protected-access
+ unknown_field_set._add(field_number, wire_type, data)
+
+ return (unknown_field_set, pos)
+
+
+def _DecodeUnknownField(buffer, pos, wire_type):
+ """Decode a unknown field. Returns the UnknownField and new position."""
+
+ if wire_type == wire_format.WIRETYPE_VARINT:
+ (data, pos) = _DecodeVarint(buffer, pos)
+ elif wire_type == wire_format.WIRETYPE_FIXED64:
+ (data, pos) = _DecodeFixed64(buffer, pos)
+ elif wire_type == wire_format.WIRETYPE_FIXED32:
+ (data, pos) = _DecodeFixed32(buffer, pos)
+ elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED:
+ (size, pos) = _DecodeVarint(buffer, pos)
+ data = buffer[pos:pos+size].tobytes()
+ pos += size
+ elif wire_type == wire_format.WIRETYPE_START_GROUP:
+ (data, pos) = _DecodeUnknownFieldSet(buffer, pos)
+ elif wire_type == wire_format.WIRETYPE_END_GROUP:
+ return (0, -1)
+ else:
+ raise _DecodeError('Wrong wire type in tag.')
+
+ return (data, pos)
+
+
+def _EndGroup(buffer, pos, end):
+ """Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""
+
+ return -1
+
+
+def _SkipFixed32(buffer, pos, end):
+ """Skip a fixed32 value. Returns the new position."""
+
+ pos += 4
+ if pos > end:
+ raise _DecodeError('Truncated message.')
+ return pos
+
+
+def _DecodeFixed32(buffer, pos):
+ """Decode a fixed32."""
+
+ new_pos = pos + 4
+  return (struct.unpack('<I', buffer[pos:new_pos])[0], new_pos)
+
+
+def _VarintEncoder():
+  """Return an encoder for a basic varint value (does not include tag)."""
+
+  local_int2byte = struct.Struct('>B').pack
+
+ def EncodeVarint(write, value, unused_deterministic=None):
+ bits = value & 0x7f
+ value >>= 7
+ while value:
+ write(local_int2byte(0x80|bits))
+ bits = value & 0x7f
+ value >>= 7
+ return write(local_int2byte(bits))
+
+ return EncodeVarint
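+
+# For illustration: collecting the writes from the encoder returned by
+# _VarintEncoder() shows the varint layout, e.g.
+#   out = []
+#   _VarintEncoder()(out.append, 300)
+#   b''.join(out) == b'\xac\x02'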
+
+
+def _SignedVarintEncoder():
+ """Return an encoder for a basic signed varint value (does not include
+ tag)."""
+
+ local_int2byte = struct.Struct('>B').pack
+
+ def EncodeSignedVarint(write, value, unused_deterministic=None):
+ if value < 0:
+ value += (1 << 64)
+ bits = value & 0x7f
+ value >>= 7
+ while value:
+ write(local_int2byte(0x80|bits))
+ bits = value & 0x7f
+ value >>= 7
+ return write(local_int2byte(bits))
+
+ return EncodeSignedVarint
+
+
+_EncodeVarint = _VarintEncoder()
+_EncodeSignedVarint = _SignedVarintEncoder()
+
+
+def _VarintBytes(value):
+ """Encode the given integer as a varint and return the bytes. This is only
+ called at startup time so it doesn't need to be fast."""
+
+ pieces = []
+ _EncodeVarint(pieces.append, value, True)
+ return b"".join(pieces)
+
+
+def TagBytes(field_number, wire_type):
+ """Encode the given tag and return the bytes. Only called at startup."""
+
+ return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type)))
+
+# --------------------------------------------------------------------
+# As with sizers (see above), we have a number of common encoder
+# implementations.
+
+
+def _SimpleEncoder(wire_type, encode_value, compute_value_size):
+ """Return a constructor for an encoder for fields of a particular type.
+
+ Args:
+ wire_type: The field's wire type, for encoding tags.
+ encode_value: A function which encodes an individual value, e.g.
+ _EncodeVarint().
+ compute_value_size: A function which computes the size of an individual
+ value, e.g. _VarintSize().
+ """
+
+ def SpecificEncoder(field_number, is_repeated, is_packed):
+ if is_packed:
+ tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+ local_EncodeVarint = _EncodeVarint
+ def EncodePackedField(write, value, deterministic):
+ write(tag_bytes)
+ size = 0
+ for element in value:
+ size += compute_value_size(element)
+ local_EncodeVarint(write, size, deterministic)
+ for element in value:
+ encode_value(write, element, deterministic)
+ return EncodePackedField
+ elif is_repeated:
+ tag_bytes = TagBytes(field_number, wire_type)
+ def EncodeRepeatedField(write, value, deterministic):
+ for element in value:
+ write(tag_bytes)
+ encode_value(write, element, deterministic)
+ return EncodeRepeatedField
+ else:
+ tag_bytes = TagBytes(field_number, wire_type)
+ def EncodeField(write, value, deterministic):
+ write(tag_bytes)
+ return encode_value(write, value, deterministic)
+ return EncodeField
+
+ return SpecificEncoder
+
+
+def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
+ """Like SimpleEncoder but additionally invokes modify_value on every value
+ before passing it to encode_value. Usually modify_value is ZigZagEncode."""
+
+ def SpecificEncoder(field_number, is_repeated, is_packed):
+ if is_packed:
+ tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+ local_EncodeVarint = _EncodeVarint
+ def EncodePackedField(write, value, deterministic):
+ write(tag_bytes)
+ size = 0
+ for element in value:
+ size += compute_value_size(modify_value(element))
+ local_EncodeVarint(write, size, deterministic)
+ for element in value:
+ encode_value(write, modify_value(element), deterministic)
+ return EncodePackedField
+ elif is_repeated:
+ tag_bytes = TagBytes(field_number, wire_type)
+ def EncodeRepeatedField(write, value, deterministic):
+ for element in value:
+ write(tag_bytes)
+ encode_value(write, modify_value(element), deterministic)
+ return EncodeRepeatedField
+ else:
+ tag_bytes = TagBytes(field_number, wire_type)
+ def EncodeField(write, value, deterministic):
+ write(tag_bytes)
+ return encode_value(write, modify_value(value), deterministic)
+ return EncodeField
+
+ return SpecificEncoder
+
+
+def _StructPackEncoder(wire_type, format):
+ """Return a constructor for an encoder for a fixed-width field.
+
+ Args:
+ wire_type: The field's wire type, for encoding tags.
+ format: The format string to pass to struct.pack().
+ """
+
+ value_size = struct.calcsize(format)
+
+ def SpecificEncoder(field_number, is_repeated, is_packed):
+ local_struct_pack = struct.pack
+ if is_packed:
+ tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+ local_EncodeVarint = _EncodeVarint
+ def EncodePackedField(write, value, deterministic):
+ write(tag_bytes)
+ local_EncodeVarint(write, len(value) * value_size, deterministic)
+ for element in value:
+ write(local_struct_pack(format, element))
+ return EncodePackedField
+ elif is_repeated:
+ tag_bytes = TagBytes(field_number, wire_type)
+ def EncodeRepeatedField(write, value, unused_deterministic=None):
+ for element in value:
+ write(tag_bytes)
+ write(local_struct_pack(format, element))
+ return EncodeRepeatedField
+ else:
+ tag_bytes = TagBytes(field_number, wire_type)
+ def EncodeField(write, value, unused_deterministic=None):
+ write(tag_bytes)
+ return write(local_struct_pack(format, value))
+ return EncodeField
+
+ return SpecificEncoder
+
+
+def _FloatingPointEncoder(wire_type, format):
+ """Return a constructor for an encoder for float fields.
+
+ This is like StructPackEncoder, but catches errors that may be due to
+ passing non-finite floating-point values to struct.pack, and makes a
+ second attempt to encode those values.
+
+ Args:
+ wire_type: The field's wire type, for encoding tags.
+ format: The format string to pass to struct.pack().
+ """
+
+ value_size = struct.calcsize(format)
+ if value_size == 4:
+ def EncodeNonFiniteOrRaise(write, value):
+ # Remember that the serialized form uses little-endian byte order.
+ if value == _POS_INF:
+ write(b'\x00\x00\x80\x7F')
+ elif value == _NEG_INF:
+ write(b'\x00\x00\x80\xFF')
+ elif value != value: # NaN
+ write(b'\x00\x00\xC0\x7F')
+ else:
+ raise
+ elif value_size == 8:
+ def EncodeNonFiniteOrRaise(write, value):
+ if value == _POS_INF:
+ write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
+ elif value == _NEG_INF:
+ write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
+ elif value != value: # NaN
+ write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
+ else:
+ raise
+ else:
+ raise ValueError('Can\'t encode floating-point values that are '
+ '%d bytes long (only 4 or 8)' % value_size)
+
+ def SpecificEncoder(field_number, is_repeated, is_packed):
+ local_struct_pack = struct.pack
+ if is_packed:
+ tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+ local_EncodeVarint = _EncodeVarint
+ def EncodePackedField(write, value, deterministic):
+ write(tag_bytes)
+ local_EncodeVarint(write, len(value) * value_size, deterministic)
+ for element in value:
+ # This try/except block is going to be faster than any code that
+ # we could write to check whether element is finite.
+ try:
+ write(local_struct_pack(format, element))
+ except SystemError:
+ EncodeNonFiniteOrRaise(write, element)
+ return EncodePackedField
+ elif is_repeated:
+ tag_bytes = TagBytes(field_number, wire_type)
+ def EncodeRepeatedField(write, value, unused_deterministic=None):
+ for element in value:
+ write(tag_bytes)
+ try:
+ write(local_struct_pack(format, element))
+ except SystemError:
+ EncodeNonFiniteOrRaise(write, element)
+ return EncodeRepeatedField
+ else:
+ tag_bytes = TagBytes(field_number, wire_type)
+ def EncodeField(write, value, unused_deterministic=None):
+ write(tag_bytes)
+ try:
+ write(local_struct_pack(format, value))
+ except SystemError:
+ EncodeNonFiniteOrRaise(write, value)
+ return EncodeField
+
+ return SpecificEncoder
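+
+# For illustration: the byte patterns above are the IEEE 754 little-endian
+# encodings, e.g. struct.pack('<f', float('inf')) == b'\x00\x00\x80\x7f', so
+# the fallback writes exactly what a successful struct.pack would produce.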
+
+
+# ====================================================================
+# Here we declare an encoder constructor for each field type. These work
+# very similarly to sizer constructors, described earlier.
+
+
+Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
+ wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)
+
+UInt32Encoder = UInt64Encoder = _SimpleEncoder(
+ wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)
+
+SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
+ wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
+ wire_format.ZigZagEncode)
+
+# Note that Python conveniently guarantees that when using the '<' prefix on
+# formats, they will also have the same size across all platforms (as opposed
+# to without the prefix, where their sizes depend on the C compiler's basic
+# type sizes).
+Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
+
+
+class EnumTypeWrapper(object):
+  """A utility for finding the names of enum values."""
+
+  DESCRIPTOR = None
+
+  # This is a type alias, which mypy typing stubs can type as
+  # a genericized parameter constrained to an int, allowing subclasses
+  # to be typed with more constraint in .pyi stubs
+  # Eg.
+  # def MyGeneratedEnum(Message):
+  #   ValueType = NewType('ValueType', int)
+  #   def Name(self, number: MyGeneratedEnum.ValueType) -> str
+  ValueType = int
+
+ def __init__(self, enum_type):
+ """Inits EnumTypeWrapper with an EnumDescriptor."""
+ self._enum_type = enum_type
+ self.DESCRIPTOR = enum_type # pylint: disable=invalid-name
+
+ def Name(self, number): # pylint: disable=invalid-name
+ """Returns a string containing the name of an enum value."""
+ try:
+ return self._enum_type.values_by_number[number].name
+ except KeyError:
+ pass # fall out to break exception chaining
+
+ if not isinstance(number, int):
+ raise TypeError(
+ 'Enum value for {} must be an int, but got {} {!r}.'.format(
+ self._enum_type.name, type(number), number))
+ else:
+ # repr here to handle the odd case when you pass in a boolean.
+ raise ValueError('Enum {} has no name defined for value {!r}'.format(
+ self._enum_type.name, number))
+
+ def Value(self, name): # pylint: disable=invalid-name
+ """Returns the value corresponding to the given enum name."""
+ try:
+ return self._enum_type.values_by_name[name].number
+ except KeyError:
+ pass # fall out to break exception chaining
+ raise ValueError('Enum {} has no value defined for name {!r}'.format(
+ self._enum_type.name, name))
+
+ def keys(self):
+ """Return a list of the string names in the enum.
+
+ Returns:
+ A list of strs, in the order they were defined in the .proto file.
+ """
+
+ return [value_descriptor.name
+ for value_descriptor in self._enum_type.values]
+
+ def values(self):
+ """Return a list of the integer values in the enum.
+
+ Returns:
+ A list of ints, in the order they were defined in the .proto file.
+ """
+
+ return [value_descriptor.number
+ for value_descriptor in self._enum_type.values]
+
+ def items(self):
+ """Return a list of the (name, value) pairs of the enum.
+
+ Returns:
+ A list of (str, int) pairs, in the order they were defined
+ in the .proto file.
+ """
+ return [(value_descriptor.name, value_descriptor.number)
+ for value_descriptor in self._enum_type.values]
+
+ def __getattr__(self, name):
+ """Returns the value corresponding to the given enum name."""
+ try:
+ return super(
+ EnumTypeWrapper,
+ self).__getattribute__('_enum_type').values_by_name[name].number
+ except KeyError:
+ pass # fall out to break exception chaining
+ raise AttributeError('Enum {} has no value defined for name {!r}'.format(
+ self._enum_type.name, name))
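+
+
+# For illustration, given a generated enum wrapped by EnumTypeWrapper -- say a
+# hypothetical 'Color' with a single value RED = 0:
+#   Color.Name(0) == 'RED'     Color.Value('RED') == 0     Color.RED == 0
+#   Color.keys() == ['RED']    Color.items() == [('RED', 0)]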
diff --git a/Lib/site-packages/google/protobuf/internal/extension_dict.py b/Lib/site-packages/google/protobuf/internal/extension_dict.py
new file mode 100644
index 0000000..89e64d3
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/extension_dict.py
@@ -0,0 +1,194 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Contains _ExtensionDict class to represent extensions.
+"""
+
+from google.protobuf.internal import type_checkers
+from google.protobuf.descriptor import FieldDescriptor
+
+
+def _VerifyExtensionHandle(message, extension_handle):
+ """Verify that the given extension handle is valid."""
+
+ if not isinstance(extension_handle, FieldDescriptor):
+ raise KeyError('HasExtension() expects an extension handle, got: %s' %
+ extension_handle)
+
+ if not extension_handle.is_extension:
+ raise KeyError('"%s" is not an extension.' % extension_handle.full_name)
+
+ if not extension_handle.containing_type:
+ raise KeyError('"%s" is missing a containing_type.'
+ % extension_handle.full_name)
+
+ if extension_handle.containing_type is not message.DESCRIPTOR:
+ raise KeyError('Extension "%s" extends message type "%s", but this '
+ 'message is of type "%s".' %
+ (extension_handle.full_name,
+ extension_handle.containing_type.full_name,
+ message.DESCRIPTOR.full_name))
+
+
+# TODO: Unify error handling of "unknown extension" crap.
+# TODO: Support iteritems()-style iteration over all
+# extensions with the "has" bits turned on?
+class _ExtensionDict(object):
+
+ """Dict-like container for Extension fields on proto instances.
+
+ Note that in all cases we expect extension handles to be
+ FieldDescriptors.
+ """
+
+ def __init__(self, extended_message):
+ """
+ Args:
+ extended_message: Message instance for which we are the Extensions dict.
+ """
+ self._extended_message = extended_message
+
+ def __getitem__(self, extension_handle):
+ """Returns the current value of the given extension handle."""
+
+ _VerifyExtensionHandle(self._extended_message, extension_handle)
+
+ result = self._extended_message._fields.get(extension_handle)
+ if result is not None:
+ return result
+
+ if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
+ result = extension_handle._default_constructor(self._extended_message)
+ elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
+ message_type = extension_handle.message_type
+ if not hasattr(message_type, '_concrete_class'):
+ # pylint: disable=g-import-not-at-top
+ from google.protobuf import message_factory
+ message_factory.GetMessageClass(message_type)
+ if not hasattr(extension_handle.message_type, '_concrete_class'):
+ from google.protobuf import message_factory
+ message_factory.GetMessageClass(extension_handle.message_type)
+ result = extension_handle.message_type._concrete_class()
+ try:
+ result._SetListener(self._extended_message._listener_for_children)
+ except ReferenceError:
+ pass
+ else:
+ # Singular scalar -- just return the default without inserting into the
+ # dict.
+ return extension_handle.default_value
+
+ # Atomically check if another thread has preempted us and, if not, swap
+ # in the new object we just created. If someone has preempted us, we
+ # take that object and discard ours.
+ # WARNING: We are relying on setdefault() being atomic. This is true
+ # in CPython but we haven't investigated others. This warning appears
+ # in several other locations in this file.
+ result = self._extended_message._fields.setdefault(
+ extension_handle, result)
+
+ return result
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return False
+
+ my_fields = self._extended_message.ListFields()
+ other_fields = other._extended_message.ListFields()
+
+ # Get rid of non-extension fields.
+ my_fields = [field for field in my_fields if field.is_extension]
+ other_fields = [field for field in other_fields if field.is_extension]
+
+ return my_fields == other_fields
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __len__(self):
+ fields = self._extended_message.ListFields()
+ # Get rid of non-extension fields.
+ extension_fields = [field for field in fields if field[0].is_extension]
+ return len(extension_fields)
+
+ def __hash__(self):
+ raise TypeError('unhashable object')
+
+ # Note that this is only meaningful for non-repeated, scalar extension
+ # fields. Note also that we may have to call _Modified() when we do
+ # successfully set a field this way, to set any necessary "has" bits in the
+ # ancestors of the extended message.
+ def __setitem__(self, extension_handle, value):
+ """If extension_handle specifies a non-repeated, scalar extension
+ field, sets the value of that field.
+ """
+
+ _VerifyExtensionHandle(self._extended_message, extension_handle)
+
+ if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or
+ extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE):
+ raise TypeError(
+ 'Cannot assign to extension "%s" because it is a repeated or '
+ 'composite type.' % extension_handle.full_name)
+
+ # It's slightly wasteful to lookup the type checker each time,
+ # but we expect this to be a vanishingly uncommon case anyway.
+ type_checker = type_checkers.GetTypeChecker(extension_handle)
+ # pylint: disable=protected-access
+ self._extended_message._fields[extension_handle] = (
+ type_checker.CheckValue(value))
+ self._extended_message._Modified()
+
+ def __delitem__(self, extension_handle):
+ self._extended_message.ClearExtension(extension_handle)
+
+ def _FindExtensionByName(self, name):
+ """Tries to find a known extension with the specified name.
+
+ Args:
+ name: Extension full name.
+
+ Returns:
+ Extension field descriptor.
+ """
+ descriptor = self._extended_message.DESCRIPTOR
+ extensions = descriptor.file.pool._extensions_by_name[descriptor]
+ return extensions.get(name, None)
+
+ def _FindExtensionByNumber(self, number):
+ """Tries to find a known extension with the field number.
+
+ Args:
+ number: Extension field number.
+
+ Returns:
+ Extension field descriptor.
+ """
+ descriptor = self._extended_message.DESCRIPTOR
+ extensions = descriptor.file.pool._extensions_by_number[descriptor]
+ return extensions.get(number, None)
+
+ def __iter__(self):
+ # Return a generator over the populated extension fields
+ return (f[0] for f in self._extended_message.ListFields()
+ if f[0].is_extension)
+
+ def __contains__(self, extension_handle):
+ _VerifyExtensionHandle(self._extended_message, extension_handle)
+
+ if extension_handle not in self._extended_message._fields:
+ return False
+
+ if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
+ return bool(self._extended_message._fields.get(extension_handle))
+
+ if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
+ value = self._extended_message._fields.get(extension_handle)
+ # pylint: disable=protected-access
+ return value is not None and value._is_present_in_parent
+
+ return True
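+
+
+# Illustrative usage sketch (not part of the library; `my_pb2`, `Base` and
+# `my_ext` are hypothetical names for a generated module whose .proto
+# declares `extend Base { optional int32 my_ext = 100; }`):
+#
+#   from my_pb2 import Base, my_ext
+#
+#   msg = Base()
+#   msg.Extensions[my_ext] = 42      # __setitem__ type-checks and stores
+#   assert my_ext in msg.Extensions  # __contains__ checks presence
+#   assert msg.Extensions[my_ext] == 42
+#   del msg.Extensions[my_ext]      # __delitem__ -> ClearExtension()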
diff --git a/Lib/site-packages/google/protobuf/internal/field_mask.py b/Lib/site-packages/google/protobuf/internal/field_mask.py
new file mode 100644
index 0000000..ae34f08
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/field_mask.py
@@ -0,0 +1,310 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Contains FieldMask class."""
+
+from google.protobuf.descriptor import FieldDescriptor
+
+
+class FieldMask(object):
+ """Class for FieldMask message type."""
+
+ __slots__ = ()
+
+ def ToJsonString(self):
+ """Converts FieldMask to string according to proto3 JSON spec."""
+ camelcase_paths = []
+ for path in self.paths:
+ camelcase_paths.append(_SnakeCaseToCamelCase(path))
+ return ','.join(camelcase_paths)
+
+ def FromJsonString(self, value):
+ """Converts string to FieldMask according to proto3 JSON spec."""
+ if not isinstance(value, str):
+ raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
+ self.Clear()
+ if value:
+ for path in value.split(','):
+ self.paths.append(_CamelCaseToSnakeCase(path))
+
+ def IsValidForDescriptor(self, message_descriptor):
+ """Checks whether the FieldMask is valid for Message Descriptor."""
+ for path in self.paths:
+ if not _IsValidPath(message_descriptor, path):
+ return False
+ return True
+
+ def AllFieldsFromDescriptor(self, message_descriptor):
+ """Gets all direct fields of Message Descriptor to FieldMask."""
+ self.Clear()
+ for field in message_descriptor.fields:
+ self.paths.append(field.name)
+
+ def CanonicalFormFromMask(self, mask):
+ """Converts a FieldMask to the canonical form.
+
+ Removes paths that are covered by another path. For example,
+ "foo.bar" is covered by "foo" and will be removed if "foo"
+ is also in the FieldMask. Then sorts all paths in alphabetical order.
+
+ Args:
+ mask: The original FieldMask to be converted.
+ """
+ tree = _FieldMaskTree(mask)
+ tree.ToFieldMask(self)
+
+ def Union(self, mask1, mask2):
+ """Merges mask1 and mask2 into this FieldMask."""
+ _CheckFieldMaskMessage(mask1)
+ _CheckFieldMaskMessage(mask2)
+ tree = _FieldMaskTree(mask1)
+ tree.MergeFromFieldMask(mask2)
+ tree.ToFieldMask(self)
+
+ def Intersect(self, mask1, mask2):
+ """Intersects mask1 and mask2 into this FieldMask."""
+ _CheckFieldMaskMessage(mask1)
+ _CheckFieldMaskMessage(mask2)
+ tree = _FieldMaskTree(mask1)
+ intersection = _FieldMaskTree()
+ for path in mask2.paths:
+ tree.IntersectPath(path, intersection)
+ intersection.ToFieldMask(self)
+
+ def MergeMessage(
+ self, source, destination,
+ replace_message_field=False, replace_repeated_field=False):
+ """Merges fields specified in FieldMask from source to destination.
+
+ Args:
+ source: Source message.
+ destination: The destination message to be merged into.
+ replace_message_field: Replace message field if True. Merge message
+ field if False.
+ replace_repeated_field: Replace repeated field if True. Append
+ elements of repeated field if False.
+ """
+ tree = _FieldMaskTree(self)
+ tree.MergeMessage(
+ source, destination, replace_message_field, replace_repeated_field)
+
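+# Illustrative sketch of the JSON round-trip implemented above (assuming the
+# generated well-known-type module google.protobuf.field_mask_pb2, into which
+# this helper class is mixed):
+#
+#   from google.protobuf import field_mask_pb2
+#
+#   mask = field_mask_pb2.FieldMask(paths=['foo.bar', 'baz_qux'])
+#   assert mask.ToJsonString() == 'foo.bar,bazQux'
+#   mask.FromJsonString('fooBar,baz')
+#   assert list(mask.paths) == ['foo_bar', 'baz']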
+
+def _IsValidPath(message_descriptor, path):
+ """Checks whether the path is valid for Message Descriptor."""
+ parts = path.split('.')
+ last = parts.pop()
+ for name in parts:
+ field = message_descriptor.fields_by_name.get(name)
+ if (field is None or
+ field.label == FieldDescriptor.LABEL_REPEATED or
+ field.type != FieldDescriptor.TYPE_MESSAGE):
+ return False
+ message_descriptor = field.message_type
+ return last in message_descriptor.fields_by_name
+
+
+def _CheckFieldMaskMessage(message):
+ """Raises ValueError if message is not a FieldMask."""
+ message_descriptor = message.DESCRIPTOR
+ if (message_descriptor.name != 'FieldMask' or
+ message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
+ raise ValueError('Message {0} is not a FieldMask.'.format(
+ message_descriptor.full_name))
+
+
+def _SnakeCaseToCamelCase(path_name):
+ """Converts a path name from snake_case to camelCase."""
+ result = []
+ after_underscore = False
+ for c in path_name:
+ if c.isupper():
+ raise ValueError(
+ 'Failed to print FieldMask to JSON string: path name '
+ '{0} must not contain uppercase letters.'.format(path_name))
+ if after_underscore:
+ if c.islower():
+ result.append(c.upper())
+ after_underscore = False
+ else:
+ raise ValueError(
+ 'Failed to print FieldMask to JSON string: the '
+ 'character after a "_" must be a lowercase letter '
+ 'in path name {0}.'.format(path_name))
+ elif c == '_':
+ after_underscore = True
+ else:
+ result += c
+
+ if after_underscore:
+ raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
+ 'in path name {0}.'.format(path_name))
+ return ''.join(result)
+
+
+def _CamelCaseToSnakeCase(path_name):
+ """Converts a field name from camelCase to snake_case."""
+ result = []
+ for c in path_name:
+ if c == '_':
+ raise ValueError('Failed to parse FieldMask: path name '
+ '{0} must not contain "_"s.'.format(path_name))
+ if c.isupper():
+ result += '_'
+ result += c.lower()
+ else:
+ result += c
+ return ''.join(result)
+
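+# For example, _SnakeCaseToCamelCase('foo_bar') == 'fooBar' and
+# _CamelCaseToSnakeCase('fooBar') == 'foo_bar'; both raise ValueError on
+# inputs that cannot round-trip (uppercase letters in the snake_case input,
+# or "_" in the camelCase input).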
+
+class _FieldMaskTree(object):
+ """Represents a FieldMask in a tree structure.
+
+ For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
+ the FieldMaskTree will be:
+ [_root] -+- foo -+- bar
+ | |
+ | +- baz
+ |
+ +- bar --- baz
+ In the tree, each leaf node represents a field path.
+ """
+
+ __slots__ = ('_root',)
+
+ def __init__(self, field_mask=None):
+ """Initializes the tree by FieldMask."""
+ self._root = {}
+ if field_mask:
+ self.MergeFromFieldMask(field_mask)
+
+ def MergeFromFieldMask(self, field_mask):
+ """Merges a FieldMask to the tree."""
+ for path in field_mask.paths:
+ self.AddPath(path)
+
+ def AddPath(self, path):
+ """Adds a field path into the tree.
+
+ If the field path to add is a sub-path of an existing field path
+ in the tree (i.e., a leaf node), it means the tree already matches
+ the given path so nothing will be added to the tree. If the path
+ matches an existing non-leaf node in the tree, that non-leaf node
+ will be turned into a leaf node with all its children removed because
+ the path matches all the node's children. Otherwise, a new path will
+ be added.
+
+ Args:
+ path: The field path to add.
+ """
+ node = self._root
+ for name in path.split('.'):
+ if name not in node:
+ node[name] = {}
+ elif not node[name]:
+ # Pre-existing empty node implies we already have this entire tree.
+ return
+ node = node[name]
+ # Remove any sub-trees we might have had.
+ node.clear()
+
+ def ToFieldMask(self, field_mask):
+ """Converts the tree to a FieldMask."""
+ field_mask.Clear()
+ _AddFieldPaths(self._root, '', field_mask)
+
+ def IntersectPath(self, path, intersection):
+ """Calculates the intersection part of a field path with this tree.
+
+ Args:
+ path: The field path to calculate the intersection for.
+ intersection: The out tree to record the intersection part.
+ """
+ node = self._root
+ for name in path.split('.'):
+ if name not in node:
+ return
+ elif not node[name]:
+ intersection.AddPath(path)
+ return
+ node = node[name]
+ intersection.AddLeafNodes(path, node)
+
+ def AddLeafNodes(self, prefix, node):
+ """Adds leaf nodes begin with prefix to this tree."""
+ if not node:
+ self.AddPath(prefix)
+ for name in node:
+ child_path = prefix + '.' + name
+ self.AddLeafNodes(child_path, node[name])
+
+ def MergeMessage(
+ self, source, destination,
+ replace_message, replace_repeated):
+ """Merge all fields specified by this tree from source to destination."""
+ _MergeMessage(
+ self._root, source, destination, replace_message, replace_repeated)
+
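+# Illustrative sketch of AddPath's collapsing behaviour (not part of the
+# library):
+#
+#   tree = _FieldMaskTree()
+#   tree.AddPath('foo.bar')   # tree._root == {'foo': {'bar': {}}}
+#   tree.AddPath('foo')       # 'foo' becomes a leaf: {'foo': {}}
+#   tree.AddPath('foo.baz')   # no-op: 'foo' already covers it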
+
+def _StrConvert(value):
+ """Converts value to str if it is not."""
+ # This file is imported by the C extension, and some methods like ClearField
+ # require a string for the field name. Python 2 and 3 have different text
+ # types, so the value may be unicode rather than str.
+ if not isinstance(value, str):
+ return value.encode('utf-8')
+ return value
+
+
+def _MergeMessage(
+ node, source, destination, replace_message, replace_repeated):
+ """Merge all fields specified by a sub-tree from source to destination."""
+ source_descriptor = source.DESCRIPTOR
+ for name in node:
+ child = node[name]
+ field = source_descriptor.fields_by_name.get(name)
+ if field is None:
+ raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
+ name, source_descriptor.full_name))
+ if child:
+ # Sub-paths are only allowed for singular message fields.
+ if (field.label == FieldDescriptor.LABEL_REPEATED or
+ field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
+ raise ValueError('Error: Field {0} in message {1} is not a singular '
+ 'message field and cannot have sub-fields.'.format(
+ name, source_descriptor.full_name))
+ if source.HasField(name):
+ _MergeMessage(
+ child, getattr(source, name), getattr(destination, name),
+ replace_message, replace_repeated)
+ continue
+ if field.label == FieldDescriptor.LABEL_REPEATED:
+ if replace_repeated:
+ destination.ClearField(_StrConvert(name))
+ repeated_source = getattr(source, name)
+ repeated_destination = getattr(destination, name)
+ repeated_destination.MergeFrom(repeated_source)
+ else:
+ if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
+ if replace_message:
+ destination.ClearField(_StrConvert(name))
+ if source.HasField(name):
+ getattr(destination, name).MergeFrom(getattr(source, name))
+ else:
+ setattr(destination, name, getattr(source, name))
+
+
+def _AddFieldPaths(node, prefix, field_mask):
+ """Adds the field paths descended from node to field_mask."""
+ if not node and prefix:
+ field_mask.paths.append(prefix)
+ return
+ for name in sorted(node):
+ if prefix:
+ child_path = prefix + '.' + name
+ else:
+ child_path = name
+ _AddFieldPaths(node[name], child_path, field_mask)
diff --git a/Lib/site-packages/google/protobuf/internal/message_listener.py b/Lib/site-packages/google/protobuf/internal/message_listener.py
new file mode 100644
index 0000000..ff1c127
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/message_listener.py
@@ -0,0 +1,55 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Defines a listener interface for observing certain
+state transitions on Message objects.
+
+Also defines a null implementation of this interface.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+
+class MessageListener(object):
+
+ """Listens for modifications made to a message. Meant to be registered via
+ Message._SetListener().
+
+ Attributes:
+ dirty: If True, then calling Modified() would be a no-op. This can be
+ used to avoid these calls entirely in the common case.
+ """
+
+ def Modified(self):
+ """Called every time the message is modified in such a way that the parent
+ message may need to be updated. This currently means either:
+ (a) The message was modified for the first time, so the parent message
+ should henceforth mark the message as present.
+ (b) The message's cached byte size became dirty -- i.e. the message was
+ modified for the first time after a previous call to ByteSize().
+ Therefore the parent should also mark its byte size as dirty.
+ Note that (a) implies (b), since new objects start out with a clean cached
+ size (zero). However, we document (a) explicitly because it is important.
+
+ Modified() will *only* be called in response to one of these two events --
+ not every time the sub-message is modified.
+
+ Note that if the listener's |dirty| attribute is true, then calling
+ Modified at the moment would be a no-op, so it can be skipped. Performance-
+ sensitive callers should check this attribute directly before calling since
+ it will be true most of the time.
+ """
+
+ raise NotImplementedError
+
+
+class NullMessageListener(object):
+
+ """No-op MessageListener implementation."""
+
+ def Modified(self):
+ pass
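+
+
+# A minimal sketch (an assumption, not part of the library) of a concrete
+# listener, e.g. for tests:
+#
+#   class RecordingListener(MessageListener):
+#
+#     def __init__(self):
+#       self.dirty = False
+#
+#     def Modified(self):
+#       self.dirty = True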
diff --git a/Lib/site-packages/google/protobuf/internal/python_message.py b/Lib/site-packages/google/protobuf/internal/python_message.py
new file mode 100644
index 0000000..40c7764
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/python_message.py
@@ -0,0 +1,1546 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# This code is meant to work on Python 2.4 and above only.
+#
+# TODO: Helpers for verbose, common checks like seeing if a
+# descriptor's cpp_type is CPPTYPE_MESSAGE.
+
+"""Contains a metaclass and helper functions used to create
+protocol message classes from Descriptor objects at runtime.
+
+Recall that a metaclass is the "type" of a class.
+(A class is to a metaclass what an instance is to a class.)
+
+In this case, we use the GeneratedProtocolMessageType metaclass
+to inject all the useful functionality into the classes
+output by the protocol compiler at compile-time.
+
+The upshot of all this is that the real implementation
+details for ALL pure-Python protocol buffers are *here in
+this file*.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+from io import BytesIO
+import struct
+import sys
+import warnings
+import weakref
+
+from google.protobuf import descriptor as descriptor_mod
+from google.protobuf import message as message_mod
+from google.protobuf import text_format
+# We use "as" to avoid name collisions with variables.
+from google.protobuf.internal import api_implementation
+from google.protobuf.internal import containers
+from google.protobuf.internal import decoder
+from google.protobuf.internal import encoder
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf.internal import extension_dict
+from google.protobuf.internal import message_listener as message_listener_mod
+from google.protobuf.internal import type_checkers
+from google.protobuf.internal import well_known_types
+from google.protobuf.internal import wire_format
+
+_FieldDescriptor = descriptor_mod.FieldDescriptor
+_AnyFullTypeName = 'google.protobuf.Any'
+_ExtensionDict = extension_dict._ExtensionDict
+
+class GeneratedProtocolMessageType(type):
+
+ """Metaclass for protocol message classes created at runtime from Descriptors.
+
+ We add implementations for all methods described in the Message class. We
+ also create properties to allow getting/setting all fields in the protocol
+ message. Finally, we create slots to prevent users from accidentally
+ "setting" nonexistent fields in the protocol message, which then wouldn't get
+ serialized / deserialized properly.
+
+ The protocol compiler currently uses this metaclass to create protocol
+ message classes at runtime. Clients can also manually create their own
+ classes at runtime, as in this example:
+
+ mydescriptor = Descriptor(.....)
+ factory = symbol_database.Default()
+ factory.pool.AddDescriptor(mydescriptor)
+ MyProtoClass = factory.GetPrototype(mydescriptor)
+ myproto_instance = MyProtoClass()
+ myproto_instance.foo_field = 23
+ ...
+ """
+
+ # Must be consistent with the protocol-compiler code in
+ # proto2/compiler/internal/generator.*.
+ _DESCRIPTOR_KEY = 'DESCRIPTOR'
+
+ def __new__(cls, name, bases, dictionary):
+ """Custom allocation for runtime-generated class types.
+
+ We override __new__ because this is apparently the only place
+ where we can meaningfully set __slots__ on the class we're creating(?).
+ (The interplay between metaclasses and slots is not very well-documented).
+
+ Args:
+ name: Name of the class (ignored, but required by the
+ metaclass protocol).
+ bases: Base classes of the class we're constructing.
+ (Should be message.Message.) We ignore this field, but
+ it's required by the metaclass protocol.
+ dictionary: The class dictionary of the class we're
+ constructing. dictionary[_DESCRIPTOR_KEY] must contain
+ a Descriptor object describing this protocol message
+ type.
+
+ Returns:
+ Newly-allocated class.
+
+ Raises:
+ RuntimeError: The generated code only works with the python cpp extension.
+ """
+ descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
+
+ if isinstance(descriptor, str):
+ raise RuntimeError('The generated code only works with the python cpp '
+ 'extension, but it is using the pure python runtime.')
+
+ # If a concrete class already exists for this descriptor, don't try to
+ # create another. Doing so will break any messages that already exist with
+ # the existing class.
+ #
+ # The C++ implementation appears to have its own internal `PyMessageFactory`
+ # to achieve similar results.
+ #
+ # This most commonly happens in `text_format.py` when using descriptors from
+ # a custom pool; it calls symbol_database.Global().GetPrototype() on a
+ # descriptor which already has an existing concrete class.
+ new_class = getattr(descriptor, '_concrete_class', None)
+ if new_class:
+ return new_class
+
+ if descriptor.full_name in well_known_types.WKTBASES:
+ bases += (well_known_types.WKTBASES[descriptor.full_name],)
+ _AddClassAttributesForNestedExtensions(descriptor, dictionary)
+ _AddSlots(descriptor, dictionary)
+
+ superclass = super(GeneratedProtocolMessageType, cls)
+ new_class = superclass.__new__(cls, name, bases, dictionary)
+ return new_class
+
+ def __init__(cls, name, bases, dictionary):
+ """Here we perform the majority of our work on the class.
+ We add enum getters, an __init__ method, implementations
+ of all Message methods, and properties for all fields
+ in the protocol type.
+
+ Args:
+ name: Name of the class (ignored, but required by the
+ metaclass protocol).
+ bases: Base classes of the class we're constructing.
+ (Should be message.Message.) We ignore this field, but
+ it's required by the metaclass protocol.
+ dictionary: The class dictionary of the class we're
+ constructing. dictionary[_DESCRIPTOR_KEY] must contain
+ a Descriptor object describing this protocol message
+ type.
+ """
+ descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
+
+ # If this is an _existing_ class looked up via `_concrete_class` in the
+ # __new__ method above, then we don't need to re-initialize anything.
+ existing_class = getattr(descriptor, '_concrete_class', None)
+ if existing_class:
+ assert existing_class is cls, (
+ 'Duplicate `GeneratedProtocolMessageType` created for descriptor %r'
+ % (descriptor.full_name))
+ return
+
+ cls._message_set_decoders_by_tag = {}
+ cls._fields_by_tag = {}
+ if (descriptor.has_options and
+ descriptor.GetOptions().message_set_wire_format):
+ cls._message_set_decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
+ decoder.MessageSetItemDecoder(descriptor),
+ None,
+ )
+
+ # Attach stuff to each FieldDescriptor for quick lookup later on.
+ for field in descriptor.fields:
+ _AttachFieldHelpers(cls, field)
+
+ if descriptor.is_extendable and hasattr(descriptor.file, 'pool'):
+ extensions = descriptor.file.pool.FindAllExtensions(descriptor)
+ for ext in extensions:
+ _AttachFieldHelpers(cls, ext)
+
+ descriptor._concrete_class = cls # pylint: disable=protected-access
+ _AddEnumValues(descriptor, cls)
+ _AddInitMethod(descriptor, cls)
+ _AddPropertiesForFields(descriptor, cls)
+ _AddPropertiesForExtensions(descriptor, cls)
+ _AddStaticMethods(cls)
+ _AddMessageMethods(descriptor, cls)
+ _AddPrivateHelperMethods(descriptor, cls)
+
+ superclass = super(GeneratedProtocolMessageType, cls)
+ superclass.__init__(name, bases, dictionary)
+
+
+# Stateless helpers for GeneratedProtocolMessageType below.
+# Outside clients should not access these directly.
+#
+# I opted not to make any of these helpers methods on the metaclass, to make
+# it clear that I'm not really using any state there and to keep clients from
+# thinking that they have direct access to these construction helpers.
+
+
+def _PropertyName(proto_field_name):
+ """Returns the name of the public property attribute which
+ clients can use to get and (in some cases) set the value
+ of a protocol message field.
+
+ Args:
+ proto_field_name: The protocol message field name, exactly
+ as it appears (or would appear) in a .proto file.
+ """
+ # TODO: Escape Python keywords (e.g., yield), and test this support.
+ # nnorwitz makes my day by writing:
+ # """
+ # FYI. See the keyword module in the stdlib. This could be as simple as:
+ #
+ # if keyword.iskeyword(proto_field_name):
+ # return proto_field_name + "_"
+ # return proto_field_name
+ # """
+ # Kenton says: The above is a BAD IDEA. People rely on being able to use
+ # getattr() and setattr() to reflectively manipulate field values. If we
+ # rename the properties, then every such user has to also make sure to apply
+ # the same transformation. Note that currently if you name a field "yield",
+ # you can still access it just fine using getattr/setattr -- it's not even
+ # that cumbersome to do so.
+ # TODO: Remove this method entirely if/when everyone agrees with my
+ # position.
+ return proto_field_name
+
+
+def _AddSlots(message_descriptor, dictionary):
+ """Adds a __slots__ entry to dictionary, containing the names of all valid
+ attributes for this message type.
+
+ Args:
+ message_descriptor: A Descriptor instance describing this message type.
+ dictionary: Class dictionary to which we'll add a '__slots__' entry.
+ """
+ dictionary['__slots__'] = ['_cached_byte_size',
+ '_cached_byte_size_dirty',
+ '_fields',
+ '_unknown_fields',
+ '_unknown_field_set',
+ '_is_present_in_parent',
+ '_listener',
+ '_listener_for_children',
+ '__weakref__',
+ '_oneofs']
+
+
+def _IsMessageSetExtension(field):
+ return (field.is_extension and
+ field.containing_type.has_options and
+ field.containing_type.GetOptions().message_set_wire_format and
+ field.type == _FieldDescriptor.TYPE_MESSAGE and
+ field.label == _FieldDescriptor.LABEL_OPTIONAL)
+
+
+def _IsMapField(field):
+ return (field.type == _FieldDescriptor.TYPE_MESSAGE and
+ field.message_type._is_map_entry)
+
+
+def _IsMessageMapField(field):
+ value_type = field.message_type.fields_by_name['value']
+ return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE
+
+def _AttachFieldHelpers(cls, field_descriptor):
+ is_repeated = field_descriptor.label == _FieldDescriptor.LABEL_REPEATED
+ field_descriptor._default_constructor = _DefaultValueConstructorForField(
+ field_descriptor
+ )
+
+ def AddFieldByTag(wiretype, is_packed):
+ tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype)
+ cls._fields_by_tag[tag_bytes] = (field_descriptor, is_packed)
+
+ AddFieldByTag(
+ type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], False
+ )
+
+ if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
+ # To support wire compatibility of adding packed = true, add a decoder for
+ # packed values regardless of the field's options.
+ AddFieldByTag(wire_format.WIRETYPE_LENGTH_DELIMITED, True)
+
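+# For example, _AttachFieldHelpers registers tag bytes b'\x08' for field
+# number 1 with WIRETYPE_VARINT ((1 << 3) | 0), and for a packable repeated
+# field number 4 it additionally registers b'\x22'
+# ((4 << 3) | WIRETYPE_LENGTH_DELIMITED).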
+
+def _MaybeAddEncoder(cls, field_descriptor):
+ if hasattr(field_descriptor, '_encoder'):
+ return
+ is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED)
+ is_map_entry = _IsMapField(field_descriptor)
+ is_packed = field_descriptor.is_packed
+
+ if is_map_entry:
+ field_encoder = encoder.MapEncoder(field_descriptor)
+ sizer = encoder.MapSizer(field_descriptor,
+ _IsMessageMapField(field_descriptor))
+ elif _IsMessageSetExtension(field_descriptor):
+ field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
+ sizer = encoder.MessageSetItemSizer(field_descriptor.number)
+ else:
+ field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type](
+ field_descriptor.number, is_repeated, is_packed)
+ sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type](
+ field_descriptor.number, is_repeated, is_packed)
+
+ field_descriptor._sizer = sizer
+ field_descriptor._encoder = field_encoder
+
+
+def _MaybeAddDecoder(cls, field_descriptor):
+ if hasattr(field_descriptor, '_decoders'):
+ return
+
+ is_repeated = field_descriptor.label == _FieldDescriptor.LABEL_REPEATED
+ is_map_entry = _IsMapField(field_descriptor)
+ helper_decoders = {}
+
+ def AddDecoder(is_packed):
+ decode_type = field_descriptor.type
+ if (decode_type == _FieldDescriptor.TYPE_ENUM and
+ not field_descriptor.enum_type.is_closed):
+ decode_type = _FieldDescriptor.TYPE_INT32
+
+ oneof_descriptor = None
+ if field_descriptor.containing_oneof is not None:
+ oneof_descriptor = field_descriptor
+
+ if is_map_entry:
+ is_message_map = _IsMessageMapField(field_descriptor)
+
+ field_decoder = decoder.MapDecoder(
+ field_descriptor, _GetInitializeDefaultForMap(field_descriptor),
+ is_message_map)
+ elif decode_type == _FieldDescriptor.TYPE_STRING:
+ field_decoder = decoder.StringDecoder(
+ field_descriptor.number, is_repeated, is_packed,
+ field_descriptor, field_descriptor._default_constructor,
+ not field_descriptor.has_presence)
+ elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
+ field_descriptor.number, is_repeated, is_packed,
+ field_descriptor, field_descriptor._default_constructor)
+ else:
+ field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
+ field_descriptor.number, is_repeated, is_packed,
+ # pylint: disable=protected-access
+ field_descriptor, field_descriptor._default_constructor,
+ not field_descriptor.has_presence)
+
+ helper_decoders[is_packed] = field_decoder
+
+ AddDecoder(False)
+
+ if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
+ # To support wire compatibility of adding packed = true, add a decoder for
+ # packed values regardless of the field's options.
+ AddDecoder(True)
+
+ field_descriptor._decoders = helper_decoders
+
+
+def _AddClassAttributesForNestedExtensions(descriptor, dictionary):
+ extensions = descriptor.extensions_by_name
+ for extension_name, extension_field in extensions.items():
+ assert extension_name not in dictionary
+ dictionary[extension_name] = extension_field
+
+
+def _AddEnumValues(descriptor, cls):
+ """Sets class-level attributes for all enum fields defined in this message.
+
+ It also exports a class-level object that can be used to look up enum
+ value names.
+
+ Args:
+ descriptor: Descriptor object for this message type.
+ cls: Class we're constructing for this message type.
+ """
+ for enum_type in descriptor.enum_types:
+ setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type))
+ for enum_value in enum_type.values:
+ setattr(cls, enum_value.name, enum_value.number)
+
+
+def _GetInitializeDefaultForMap(field):
+ if field.label != _FieldDescriptor.LABEL_REPEATED:
+ raise ValueError('map_entry set on non-repeated field %s' % (
+ field.name))
+ fields_by_name = field.message_type.fields_by_name
+ key_checker = type_checkers.GetTypeChecker(fields_by_name['key'])
+
+ value_field = fields_by_name['value']
+ if _IsMessageMapField(field):
+ def MakeMessageMapDefault(message):
+ return containers.MessageMap(
+ message._listener_for_children, value_field.message_type, key_checker,
+ field.message_type)
+ return MakeMessageMapDefault
+ else:
+ value_checker = type_checkers.GetTypeChecker(value_field)
+ def MakePrimitiveMapDefault(message):
+ return containers.ScalarMap(
+ message._listener_for_children, key_checker, value_checker,
+ field.message_type)
+ return MakePrimitiveMapDefault
+
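+# For example, for `map<string, int32> counts = 1;` the constructor built by
+# _GetInitializeDefaultForMap returns a ScalarMap, so `msg.counts['x'] = 3`
+# type-checks both the key and the value via the entry's field descriptors.
+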
+def _DefaultValueConstructorForField(field):
+ """Returns a function which returns a default value for a field.
+
+ Args:
+ field: FieldDescriptor object for this field.
+
+ The returned function has one argument:
+ message: Message instance containing this field, or a weakref proxy
+ of same.
+
+ That function in turn returns a default value for this field. The default
+ value may refer back to |message| via a weak reference.
+ """
+
+ if _IsMapField(field):
+ return _GetInitializeDefaultForMap(field)
+
+ if field.label == _FieldDescriptor.LABEL_REPEATED:
+ if field.has_default_value and field.default_value != []:
+ raise ValueError('Repeated field default value not empty list: %s' % (
+ field.default_value))
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ # We can't look at _concrete_class yet since it might not have
+ # been set. (Depends on order in which we initialize the classes).
+ message_type = field.message_type
+ def MakeRepeatedMessageDefault(message):
+ return containers.RepeatedCompositeFieldContainer(
+ message._listener_for_children, field.message_type)
+ return MakeRepeatedMessageDefault
+ else:
+ type_checker = type_checkers.GetTypeChecker(field)
+ def MakeRepeatedScalarDefault(message):
+ return containers.RepeatedScalarFieldContainer(
+ message._listener_for_children, type_checker)
+ return MakeRepeatedScalarDefault
+
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ message_type = field.message_type
+ def MakeSubMessageDefault(message):
+ # _concrete_class may not yet be initialized.
+ if not hasattr(message_type, '_concrete_class'):
+ from google.protobuf import message_factory
+ message_factory.GetMessageClass(message_type)
+ result = message_type._concrete_class()
+ result._SetListener(
+ _OneofListener(message, field)
+ if field.containing_oneof is not None
+ else message._listener_for_children)
+ return result
+ return MakeSubMessageDefault
+
+ def MakeScalarDefault(message):
+ # TODO: This may be broken since there may not be
+ # default_value. Combine with has_default_value somehow.
+ return field.default_value
+ return MakeScalarDefault
+
+
+def _ReraiseTypeErrorWithFieldName(message_name, field_name):
+ """Re-raise the currently-handled TypeError with the field name added."""
+ exc = sys.exc_info()[1]
+ if len(exc.args) == 1 and type(exc) is TypeError:
+ # simple TypeError; add field name to exception message
+ exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name))
+
+ # re-raise possibly-amended exception with original traceback:
+ raise exc.with_traceback(sys.exc_info()[2])
+
+
+def _AddInitMethod(message_descriptor, cls):
+ """Adds an __init__ method to cls."""
+
+ def _GetIntegerEnumValue(enum_type, value):
+ """Convert a string or integer enum value to an integer.
+
+ If the value is a string, it is converted to the enum value in
+ enum_type with the same name. If the value is not a string, it's
+ returned as-is. (No conversion or bounds-checking is done.)
+ """
+ if isinstance(value, str):
+ try:
+ return enum_type.values_by_name[value].number
+ except KeyError:
+ raise ValueError('Enum type %s: unknown label "%s"' % (
+ enum_type.full_name, value))
+ return value
+
+ def init(self, **kwargs):
+ self._cached_byte_size = 0
+ self._cached_byte_size_dirty = len(kwargs) > 0
+ self._fields = {}
+ # Contains a mapping from oneof field descriptors to the descriptor
+ # of the currently set field in that oneof field.
+ self._oneofs = {}
+
+ # _unknown_fields is () when empty for efficiency, and will be turned into
+ # a list if fields are added.
+ self._unknown_fields = ()
+ # _unknown_field_set is None when empty for efficiency, and will be
+ # turned into UnknownFieldSet struct if fields are added.
+ self._unknown_field_set = None # pylint: disable=protected-access
+ self._is_present_in_parent = False
+ self._listener = message_listener_mod.NullMessageListener()
+ self._listener_for_children = _Listener(self)
+ for field_name, field_value in kwargs.items():
+ field = _GetFieldByName(message_descriptor, field_name)
+ if field is None:
+ raise TypeError('%s() got an unexpected keyword argument "%s"' %
+ (message_descriptor.name, field_name))
+ if field_value is None:
+ # field=None is the same as no field at all.
+ continue
+ if field.label == _FieldDescriptor.LABEL_REPEATED:
+ copy = field._default_constructor(self)
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite
+ if _IsMapField(field):
+ if _IsMessageMapField(field):
+ for key in field_value:
+ copy[key].MergeFrom(field_value[key])
+ else:
+ copy.update(field_value)
+ else:
+ for val in field_value:
+ if isinstance(val, dict):
+ copy.add(**val)
+ else:
+ copy.add().MergeFrom(val)
+ else: # Scalar
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
+ field_value = [_GetIntegerEnumValue(field.enum_type, val)
+ for val in field_value]
+ copy.extend(field_value)
+ self._fields[field] = copy
+ elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ copy = field._default_constructor(self)
+ new_val = field_value
+ if isinstance(field_value, dict):
+ new_val = field.message_type._concrete_class(**field_value)
+ try:
+ copy.MergeFrom(new_val)
+ except TypeError:
+ _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
+ self._fields[field] = copy
+ else:
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
+ field_value = _GetIntegerEnumValue(field.enum_type, field_value)
+ try:
+ setattr(self, field_name, field_value)
+ except TypeError:
+ _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
+
+ init.__module__ = None
+ init.__doc__ = None
+ cls.__init__ = init
+
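+# Illustrative sketch of the keyword-argument semantics implemented by init()
+# above, assuming a hypothetical generated class Person with a string field
+# `name`, a repeated string field `emails` and a message field `address`:
+#
+#   p = Person(name='Ann', emails=['a@x', 'b@x'],
+#              address={'city': 'Oslo'})  # a dict initialises the sub-message
+#   q = Person(name=None)                 # same as Person(); None is skipped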
+
+def _GetFieldByName(message_descriptor, field_name):
+ """Returns a field descriptor by field name.
+
+ Args:
+ message_descriptor: A Descriptor describing all fields in message.
+ field_name: The name of the field to retrieve.
+ Returns:
+ The field descriptor associated with the field name.
+ """
+ try:
+ return message_descriptor.fields_by_name[field_name]
+ except KeyError:
+ raise ValueError('Protocol message %s has no "%s" field.' %
+ (message_descriptor.name, field_name))
+
+
+def _AddPropertiesForFields(descriptor, cls):
+ """Adds properties for all fields in this protocol message type."""
+ for field in descriptor.fields:
+ _AddPropertiesForField(field, cls)
+
+ if descriptor.is_extendable:
+ # _ExtensionDict is just an adaptor with no state so we allocate a new one
+ # every time it is accessed.
+ cls.Extensions = property(lambda self: _ExtensionDict(self))
+
+
+def _AddPropertiesForField(field, cls):
+ """Adds a public property for a protocol message field.
+ Clients can use this property to get and (in the case
+ of non-repeated scalar fields) directly set the value
+ of a protocol message field.
+
+ Args:
+ field: A FieldDescriptor for this field.
+ cls: The class we're constructing.
+ """
+ # Catch it if we add other types that we should
+ # handle specially here.
+ assert _FieldDescriptor.MAX_CPPTYPE == 10
+
+ constant_name = field.name.upper() + '_FIELD_NUMBER'
+ setattr(cls, constant_name, field.number)
+
+ if field.label == _FieldDescriptor.LABEL_REPEATED:
+ _AddPropertiesForRepeatedField(field, cls)
+ elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ _AddPropertiesForNonRepeatedCompositeField(field, cls)
+ else:
+ _AddPropertiesForNonRepeatedScalarField(field, cls)
+
+
+class _FieldProperty(property):
+ __slots__ = ('DESCRIPTOR',)
+
+ def __init__(self, descriptor, getter, setter, doc):
+ property.__init__(self, getter, setter, doc=doc)
+ self.DESCRIPTOR = descriptor
+
+
+def _AddPropertiesForRepeatedField(field, cls):
+ """Adds a public property for a "repeated" protocol message field. Clients
+ can use this property to get the value of the field, which will be either a
+ RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see
+ below).
+
+ Note that when clients add values to these containers, we perform
+ type-checking in the case of repeated scalar fields, and we also set any
+ necessary "has" bits as a side-effect.
+
+ Args:
+ field: A FieldDescriptor for this field.
+ cls: The class we're constructing.
+ """
+ proto_field_name = field.name
+ property_name = _PropertyName(proto_field_name)
+
+ def getter(self):
+ field_value = self._fields.get(field)
+ if field_value is None:
+ # Construct a new object to represent this field.
+ field_value = field._default_constructor(self)
+
+ # Atomically check if another thread has preempted us and, if not, swap
+ # in the new object we just created. If someone has preempted us, we
+ # take that object and discard ours.
+ # WARNING: We are relying on setdefault() being atomic. This is true
+ # in CPython but we haven't investigated others. This warning appears
+ # in several other locations in this file.
+ field_value = self._fields.setdefault(field, field_value)
+ return field_value
+ getter.__module__ = None
+ getter.__doc__ = 'Getter for %s.' % proto_field_name
+
+ # We define a setter just so we can throw an exception with a more
+ # helpful error message.
+ def setter(self, new_value):
+ raise AttributeError('Assignment not allowed to repeated field '
+ '"%s" in protocol message object.' % proto_field_name)
+
+ doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
+ setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
+
+
+def _AddPropertiesForNonRepeatedScalarField(field, cls):
+ """Adds a public property for a nonrepeated, scalar protocol message field.
+ Clients can use this property to get and directly set the value of the field.
+ Note that when the client sets the value of a field by using this property,
+ all necessary "has" bits are set as a side-effect, and we also perform
+ type-checking.
+
+ Args:
+ field: A FieldDescriptor for this field.
+ cls: The class we're constructing.
+ """
+ proto_field_name = field.name
+ property_name = _PropertyName(proto_field_name)
+ type_checker = type_checkers.GetTypeChecker(field)
+ default_value = field.default_value
+
+ def getter(self):
+ # TODO: This may be broken since there may not be
+ # default_value. Combine with has_default_value somehow.
+ return self._fields.get(field, default_value)
+ getter.__module__ = None
+ getter.__doc__ = 'Getter for %s.' % proto_field_name
+
+ def field_setter(self, new_value):
+ # pylint: disable=protected-access
+ # Testing the value for truthiness captures all of the proto3 defaults
+ # (0, 0.0, enum 0, and False).
+ try:
+ new_value = type_checker.CheckValue(new_value)
+ except TypeError as e:
+ raise TypeError(
+ 'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e))
+ if not field.has_presence and not new_value:
+ self._fields.pop(field, None)
+ else:
+ self._fields[field] = new_value
+ # Check _cached_byte_size_dirty inline to improve performance, since scalar
+ # setters are called frequently.
+ if not self._cached_byte_size_dirty:
+ self._Modified()
+
+ if field.containing_oneof:
+ def setter(self, new_value):
+ field_setter(self, new_value)
+ self._UpdateOneofState(field)
+ else:
+ setter = field_setter
+
+ setter.__module__ = None
+ setter.__doc__ = 'Setter for %s.' % proto_field_name
+
+ # Add a property to encapsulate the getter/setter.
+ doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
+ setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
+
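+# Note on the setter above: for a proto3 scalar without explicit presence,
+# assigning the default (e.g. `msg.count = 0` for a hypothetical int32 field)
+# pops the entry from _fields, so the field is skipped during serialization,
+# while `msg.count = 5` stores the value and marks the message modified.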
+
+def _AddPropertiesForNonRepeatedCompositeField(field, cls):
+ """Adds a public property for a nonrepeated, composite protocol message field.
+ A composite field is a "group" or "message" field.
+
+ Clients can use this property to get the value of the field, but cannot
+ assign to the property directly.
+
+ Args:
+ field: A FieldDescriptor for this field.
+ cls: The class we're constructing.
+ """
+ # TODO: Remove duplication with similar method
+ # for non-repeated scalars.
+ proto_field_name = field.name
+ property_name = _PropertyName(proto_field_name)
+
+ def getter(self):
+ field_value = self._fields.get(field)
+ if field_value is None:
+ # Construct a new object to represent this field.
+ field_value = field._default_constructor(self)
+
+ # Atomically check if another thread has preempted us and, if not, swap
+ # in the new object we just created. If someone has preempted us, we
+ # take that object and discard ours.
+ # WARNING: We are relying on setdefault() being atomic. This is true
+ # in CPython but we haven't investigated others. This warning appears
+ # in several other locations in this file.
+ field_value = self._fields.setdefault(field, field_value)
+ return field_value
+ getter.__module__ = None
+ getter.__doc__ = 'Getter for %s.' % proto_field_name
+
+ # We define a setter just so we can throw an exception with a more
+ # helpful error message.
+ def setter(self, new_value):
+ raise AttributeError('Assignment not allowed to composite field '
+ '"%s" in protocol message object.' % proto_field_name)
+
+ # Add a property to encapsulate the getter.
+ doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
+ setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
+
+
+def _AddPropertiesForExtensions(descriptor, cls):
+ """Adds properties for all fields in this protocol message type."""
+ extensions = descriptor.extensions_by_name
+ for extension_name, extension_field in extensions.items():
+ constant_name = extension_name.upper() + '_FIELD_NUMBER'
+ setattr(cls, constant_name, extension_field.number)
+
+ # TODO: Migrate all users of these attributes to functions like
+ # pool.FindExtensionByNumber(descriptor).
+ if descriptor.file is not None:
+ # TODO: Use cls.MESSAGE_FACTORY.pool when available.
+ pool = descriptor.file.pool
+
+def _AddStaticMethods(cls):
+ # TODO: This probably needs to be thread-safe(?)
+ def RegisterExtension(field_descriptor):
+ field_descriptor.containing_type = cls.DESCRIPTOR
+ # TODO: Use cls.MESSAGE_FACTORY.pool when available.
+ # pylint: disable=protected-access
+ cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(field_descriptor)
+ _AttachFieldHelpers(cls, field_descriptor)
+ cls.RegisterExtension = staticmethod(RegisterExtension)
+
+ def FromString(s):
+ message = cls()
+ message.MergeFromString(s)
+ return message
+ cls.FromString = staticmethod(FromString)
+
+
+def _IsPresent(item):
+ """Given a (FieldDescriptor, value) tuple from _fields, return true if the
+ value should be included in the list returned by ListFields()."""
+
+ if item[0].label == _FieldDescriptor.LABEL_REPEATED:
+ return bool(item[1])
+ elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ return item[1]._is_present_in_parent
+ else:
+ return True
+
+
+def _AddListFieldsMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+
+ def ListFields(self):
+ all_fields = [item for item in self._fields.items() if _IsPresent(item)]
+ all_fields.sort(key=lambda item: item[0].number)
+ return all_fields
+
+ cls.ListFields = ListFields
+
+
+def _AddHasFieldMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+
+ hassable_fields = {}
+ for field in message_descriptor.fields:
+ if field.label == _FieldDescriptor.LABEL_REPEATED:
+ continue
+ # For proto3, only submessages and fields inside a oneof have presence.
+ if not field.has_presence:
+ continue
+ hassable_fields[field.name] = field
+
+ # Has methods are supported for oneof descriptors.
+ for oneof in message_descriptor.oneofs:
+ hassable_fields[oneof.name] = oneof
+
+ def HasField(self, field_name):
+ try:
+ field = hassable_fields[field_name]
+ except KeyError as exc:
+ raise ValueError('Protocol message %s has no non-repeated field "%s" '
+ 'nor has presence is not available for this field.' % (
+ message_descriptor.full_name, field_name)) from exc
+
+ if isinstance(field, descriptor_mod.OneofDescriptor):
+ try:
+ return HasField(self, self._oneofs[field].name)
+ except KeyError:
+ return False
+ else:
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ value = self._fields.get(field)
+ return value is not None and value._is_present_in_parent
+ else:
+ return field in self._fields
+
+ cls.HasField = HasField
+
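+# For example, with `oneof kind { int32 a = 1; string b = 2; }`:
+#
+#   msg.a = 1
+#   msg.HasField('kind')  # True: resolves to HasField('a') via _oneofs
+#   msg.HasField('b')     # False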
+
+def _AddClearFieldMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+ def ClearField(self, field_name):
+ try:
+ field = message_descriptor.fields_by_name[field_name]
+ except KeyError:
+ try:
+ field = message_descriptor.oneofs_by_name[field_name]
+ if field in self._oneofs:
+ field = self._oneofs[field]
+ else:
+ return
+ except KeyError:
+ raise ValueError('Protocol message %s has no "%s" field.' %
+ (message_descriptor.name, field_name))
+
+ if field in self._fields:
+ # To match the C++ implementation, we need to invalidate iterators
+ # for map fields when ClearField() happens.
+ if hasattr(self._fields[field], 'InvalidateIterators'):
+ self._fields[field].InvalidateIterators()
+
+ # Note: If the field is a sub-message, its listener will still point
+ # at us. That's fine, because the worst that can happen is that it
+ # will call _Modified() and invalidate our byte size. Big deal.
+ del self._fields[field]
+
+ if self._oneofs.get(field.containing_oneof, None) is field:
+ del self._oneofs[field.containing_oneof]
+
+ # Always call _Modified() -- even if nothing was changed, this is
+ # a mutating method, and thus calling it should cause the field to become
+ # present in the parent message.
+ self._Modified()
+
+ cls.ClearField = ClearField
+
+
+def _AddClearExtensionMethod(cls):
+ """Helper for _AddMessageMethods()."""
+ def ClearExtension(self, field_descriptor):
+ extension_dict._VerifyExtensionHandle(self, field_descriptor)
+
+ # Similar to ClearField(), above.
+ if field_descriptor in self._fields:
+ del self._fields[field_descriptor]
+ self._Modified()
+ cls.ClearExtension = ClearExtension
+
+
+def _AddHasExtensionMethod(cls):
+ """Helper for _AddMessageMethods()."""
+ def HasExtension(self, field_descriptor):
+ extension_dict._VerifyExtensionHandle(self, field_descriptor)
+ if field_descriptor.label == _FieldDescriptor.LABEL_REPEATED:
+ raise KeyError('"%s" is repeated.' % field_descriptor.full_name)
+
+ if field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ value = self._fields.get(field_descriptor)
+ return value is not None and value._is_present_in_parent
+ else:
+ return field_descriptor in self._fields
+ cls.HasExtension = HasExtension
+
+def _InternalUnpackAny(msg):
+ """Unpacks Any message and returns the unpacked message.
+
+ This internal method is different from public Any Unpack method which takes
+ the target message as argument. _InternalUnpackAny method does not have
+ target message type and need to find the message type in descriptor pool.
+
+ Args:
+ msg: An Any message to be unpacked.
+
+ Returns:
+ The unpacked message.
+ """
+ # TODO: Don't use the factory of generated messages.
+ # To make Any work with custom factories, use the message factory of the
+ # parent message.
+ # pylint: disable=g-import-not-at-top
+ from google.protobuf import symbol_database
+ factory = symbol_database.Default()
+
+ type_url = msg.type_url
+
+ if not type_url:
+ return None
+
+ # TODO: For now we just strip the hostname. Better logic will be
+ # required.
+ type_name = type_url.split('/')[-1]
+ descriptor = factory.pool.FindMessageTypeByName(type_name)
+
+ if descriptor is None:
+ return None
+
+ message_class = factory.GetPrototype(descriptor)
+ message = message_class()
+
+ message.ParseFromString(msg.value)
+ return message
+
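+# For example, a type_url of 'type.googleapis.com/google.protobuf.Duration'
+# yields the type name 'google.protobuf.Duration' after the split('/') above.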
+
+def _AddEqualsMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+ def __eq__(self, other):
+ if (not isinstance(other, message_mod.Message) or
+ other.DESCRIPTOR != self.DESCRIPTOR):
+ return NotImplemented
+
+ if self is other:
+ return True
+
+ if self.DESCRIPTOR.full_name == _AnyFullTypeName:
+ any_a = _InternalUnpackAny(self)
+ any_b = _InternalUnpackAny(other)
+ if any_a and any_b:
+ return any_a == any_b
+
+ if not self.ListFields() == other.ListFields():
+ return False
+
+ # TODO: Fix UnknownFieldSet to consider MessageSet extensions,
+ # then use it for the comparison.
+ unknown_fields = list(self._unknown_fields)
+ unknown_fields.sort()
+ other_unknown_fields = list(other._unknown_fields)
+ other_unknown_fields.sort()
+ return unknown_fields == other_unknown_fields
+
+ cls.__eq__ = __eq__
+
+
+def _AddStrMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+ def __str__(self):
+ return text_format.MessageToString(self)
+ cls.__str__ = __str__
+
+
+def _AddReprMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+ def __repr__(self):
+ return text_format.MessageToString(self)
+ cls.__repr__ = __repr__
+
+
+def _AddUnicodeMethod(unused_message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+
+ def __unicode__(self):
+ return text_format.MessageToString(self, as_utf8=True).decode('utf-8')
+ cls.__unicode__ = __unicode__
+
+
+def _BytesForNonRepeatedElement(value, field_number, field_type):
+ """Returns the number of bytes needed to serialize a non-repeated element.
+ The returned byte count includes space for tag information and any
+ other additional space associated with serializing value.
+
+ Args:
+ value: Value we're serializing.
+ field_number: Field number of this value. (Since the field number
+ is stored as part of a varint-encoded tag, this has an impact
+ on the total bytes required to serialize the value).
+ field_type: The type of the field. One of the TYPE_* constants
+ within FieldDescriptor.
+ """
+ try:
+ fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type]
+ return fn(field_number, value)
+ except KeyError:
+ raise message_mod.EncodeError('Unrecognized field type: %d' % field_type)
+
+
+def _AddByteSizeMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+
+ def ByteSize(self):
+ if not self._cached_byte_size_dirty:
+ return self._cached_byte_size
+
+ size = 0
+ descriptor = self.DESCRIPTOR
+ if descriptor._is_map_entry:
+ # Fields of map entry should always be serialized.
+ key_field = descriptor.fields_by_name['key']
+ _MaybeAddEncoder(cls, key_field)
+ size = key_field._sizer(self.key)
+ value_field = descriptor.fields_by_name['value']
+ _MaybeAddEncoder(cls, value_field)
+ size += value_field._sizer(self.value)
+ else:
+ for field_descriptor, field_value in self.ListFields():
+ _MaybeAddEncoder(cls, field_descriptor)
+ size += field_descriptor._sizer(field_value)
+ for tag_bytes, value_bytes in self._unknown_fields:
+ size += len(tag_bytes) + len(value_bytes)
+
+ self._cached_byte_size = size
+ self._cached_byte_size_dirty = False
+ self._listener_for_children.dirty = False
+ return size
+
+ cls.ByteSize = ByteSize
+
+
+def _AddSerializeToStringMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+
+ def SerializeToString(self, **kwargs):
+ # Check if the message has all of its required fields set.
+ if not self.IsInitialized():
+ raise message_mod.EncodeError(
+ 'Message %s is missing required fields: %s' % (
+ self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
+ return self.SerializePartialToString(**kwargs)
+ cls.SerializeToString = SerializeToString
+
+
+def _AddSerializePartialToStringMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+
+ def SerializePartialToString(self, **kwargs):
+ out = BytesIO()
+ self._InternalSerialize(out.write, **kwargs)
+ return out.getvalue()
+ cls.SerializePartialToString = SerializePartialToString
+
+ def InternalSerialize(self, write_bytes, deterministic=None):
+ if deterministic is None:
+ deterministic = (
+ api_implementation.IsPythonDefaultSerializationDeterministic())
+ else:
+ deterministic = bool(deterministic)
+
+ descriptor = self.DESCRIPTOR
+ if descriptor._is_map_entry:
+ # Fields of map entry should always be serialized.
+ key_field = descriptor.fields_by_name['key']
+ _MaybeAddEncoder(cls, key_field)
+ key_field._encoder(write_bytes, self.key, deterministic)
+ value_field = descriptor.fields_by_name['value']
+ _MaybeAddEncoder(cls, value_field)
+ value_field._encoder(write_bytes, self.value, deterministic)
+ else:
+ for field_descriptor, field_value in self.ListFields():
+ _MaybeAddEncoder(cls, field_descriptor)
+ field_descriptor._encoder(write_bytes, field_value, deterministic)
+ for tag_bytes, value_bytes in self._unknown_fields:
+ write_bytes(tag_bytes)
+ write_bytes(value_bytes)
+ cls._InternalSerialize = InternalSerialize
+
+
+def _AddMergeFromStringMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+ def MergeFromString(self, serialized):
+ serialized = memoryview(serialized)
+ length = len(serialized)
+ try:
+ if self._InternalParse(serialized, 0, length) != length:
+ # The only reason _InternalParse would return early is if it
+ # encountered an end-group tag.
+ raise message_mod.DecodeError('Unexpected end-group tag.')
+ except (IndexError, TypeError):
+ # Now ord(buf[p:p+1]) == ord('') gets TypeError.
+ raise message_mod.DecodeError('Truncated message.')
+ except struct.error as e:
+ raise message_mod.DecodeError(e)
+ return length # Return this for legacy reasons.
+ cls.MergeFromString = MergeFromString
+
+ local_ReadTag = decoder.ReadTag
+ local_SkipField = decoder.SkipField
+ fields_by_tag = cls._fields_by_tag
+ message_set_decoders_by_tag = cls._message_set_decoders_by_tag
+
+ def InternalParse(self, buffer, pos, end):
+ """Create a message from serialized bytes.
+
+ Args:
+ self: Message, instance of the proto message object.
+ buffer: memoryview of the serialized data.
+ pos: int, position to start in the serialized data.
+ end: int, end position of the serialized data.
+
+ Returns:
+ int, the new position in the serialized data after parsing.
+ """
+ # Guard against internal misuse, since this function is called internally
+ # quite extensively, and it's easy to accidentally pass bytes.
+ assert isinstance(buffer, memoryview)
+ self._Modified()
+ field_dict = self._fields
+ # pylint: disable=protected-access
+ unknown_field_set = self._unknown_field_set
+ while pos != end:
+ (tag_bytes, new_pos) = local_ReadTag(buffer, pos)
+ field_decoder, field_des = message_set_decoders_by_tag.get(
+ tag_bytes, (None, None)
+ )
+ if field_decoder:
+ pos = field_decoder(buffer, new_pos, end, self, field_dict)
+ continue
+ field_des, is_packed = fields_by_tag.get(tag_bytes, (None, None))
+ if field_des is None:
+ if not self._unknown_fields: # pylint: disable=protected-access
+ self._unknown_fields = [] # pylint: disable=protected-access
+ if unknown_field_set is None:
+ # pylint: disable=protected-access
+ self._unknown_field_set = containers.UnknownFieldSet()
+ # pylint: disable=protected-access
+ unknown_field_set = self._unknown_field_set
+ # pylint: disable=protected-access
+ (tag, _) = decoder._DecodeVarint(tag_bytes, 0)
+ field_number, wire_type = wire_format.UnpackTag(tag)
+ if field_number == 0:
+ raise message_mod.DecodeError('Field number 0 is illegal.')
+ # TODO: remove old_pos.
+ old_pos = new_pos
+ (data, new_pos) = decoder._DecodeUnknownField(
+ buffer, new_pos, wire_type) # pylint: disable=protected-access
+ if new_pos == -1:
+ return pos
+ # pylint: disable=protected-access
+ unknown_field_set._add(field_number, wire_type, data)
+ # TODO: remove _unknown_fields.
+ new_pos = local_SkipField(buffer, old_pos, end, tag_bytes)
+ if new_pos == -1:
+ return pos
+ self._unknown_fields.append(
+ (tag_bytes, buffer[old_pos:new_pos].tobytes()))
+ pos = new_pos
+ else:
+ _MaybeAddDecoder(cls, field_des)
+ field_decoder = field_des._decoders[is_packed]
+ pos = field_decoder(buffer, new_pos, end, self, field_dict)
+ if field_des.containing_oneof:
+ self._UpdateOneofState(field_des)
+ return pos
+ cls._InternalParse = InternalParse
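+
+# Illustrative sketch, assuming a hypothetical generated class foo_pb2.Foo
+# that declares only field 1: bytes for an unrecognized field number are kept
+# as unknown fields and round-trip through serialization:
+#
+#   >>> msg = foo_pb2.Foo()
+#   >>> msg.MergeFromString(b'\x10\x2a')  # tag for field 2 (varint), value 42
+#   2
+#   >>> msg.SerializeToString()
+#   b'\x10*'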
+
+
+def _AddIsInitializedMethod(message_descriptor, cls):
+ """Adds the IsInitialized and FindInitializationError methods to the
+ protocol message class."""
+
+ required_fields = [field for field in message_descriptor.fields
+ if field.label == _FieldDescriptor.LABEL_REQUIRED]
+
+ def IsInitialized(self, errors=None):
+ """Checks if all required fields of a message are set.
+
+ Args:
+ errors: A list which, if provided, will be populated with the field
+ paths of all missing required fields.
+
+ Returns:
+ True iff the specified message has all required fields set.
+ """
+
+ # Performance is critical so we avoid HasField() and ListFields().
+
+ for field in required_fields:
+ if (field not in self._fields or
+ (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and
+ not self._fields[field]._is_present_in_parent)):
+ if errors is not None:
+ errors.extend(self.FindInitializationErrors())
+ return False
+
+ for field, value in list(self._fields.items()): # dict can change size!
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ if field.label == _FieldDescriptor.LABEL_REPEATED:
+ if (field.message_type._is_map_entry):
+ continue
+ for element in value:
+ if not element.IsInitialized():
+ if errors is not None:
+ errors.extend(self.FindInitializationErrors())
+ return False
+ elif value._is_present_in_parent and not value.IsInitialized():
+ if errors is not None:
+ errors.extend(self.FindInitializationErrors())
+ return False
+
+ return True
+
+ cls.IsInitialized = IsInitialized
+
+ def FindInitializationErrors(self):
+ """Finds required fields which are not initialized.
+
+ Returns:
+ A list of strings. Each string is a path to an uninitialized field from
+ the top-level message, e.g. "foo.bar[5].baz".
+ """
+
+    errors = []  # Accumulated paths of uninitialized fields.
+
+ for field in required_fields:
+ if not self.HasField(field.name):
+ errors.append(field.name)
+
+ for field, value in self.ListFields():
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ if field.is_extension:
+ name = '(%s)' % field.full_name
+ else:
+ name = field.name
+
+ if _IsMapField(field):
+ if _IsMessageMapField(field):
+ for key in value:
+ element = value[key]
+ prefix = '%s[%s].' % (name, key)
+ sub_errors = element.FindInitializationErrors()
+ errors += [prefix + error for error in sub_errors]
+ else:
+ # ScalarMaps can't have any initialization errors.
+ pass
+ elif field.label == _FieldDescriptor.LABEL_REPEATED:
+ for i in range(len(value)):
+ element = value[i]
+ prefix = '%s[%d].' % (name, i)
+ sub_errors = element.FindInitializationErrors()
+ errors += [prefix + error for error in sub_errors]
+ else:
+ prefix = name + '.'
+ sub_errors = value.FindInitializationErrors()
+ errors += [prefix + error for error in sub_errors]
+
+ return errors
+
+ cls.FindInitializationErrors = FindInitializationErrors
+
+
+def _FullyQualifiedClassName(klass):
+ module = klass.__module__
+ name = getattr(klass, '__qualname__', klass.__name__)
+ if module in (None, 'builtins', '__builtin__'):
+ return name
+ return module + '.' + name
+
+
+def _AddMergeFromMethod(cls):
+ LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED
+ CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE
+
+ def MergeFrom(self, msg):
+ if not isinstance(msg, cls):
+ raise TypeError(
+ 'Parameter to MergeFrom() must be instance of same class: '
+ 'expected %s got %s.' % (_FullyQualifiedClassName(cls),
+ _FullyQualifiedClassName(msg.__class__)))
+
+ assert msg is not self
+ self._Modified()
+
+ fields = self._fields
+
+ for field, value in msg._fields.items():
+ if field.label == LABEL_REPEATED:
+ field_value = fields.get(field)
+ if field_value is None:
+ # Construct a new object to represent this field.
+ field_value = field._default_constructor(self)
+ fields[field] = field_value
+ field_value.MergeFrom(value)
+ elif field.cpp_type == CPPTYPE_MESSAGE:
+ if value._is_present_in_parent:
+ field_value = fields.get(field)
+ if field_value is None:
+ # Construct a new object to represent this field.
+ field_value = field._default_constructor(self)
+ fields[field] = field_value
+ field_value.MergeFrom(value)
+ else:
+ self._fields[field] = value
+ if field.containing_oneof:
+ self._UpdateOneofState(field)
+
+ if msg._unknown_fields:
+ if not self._unknown_fields:
+ self._unknown_fields = []
+ self._unknown_fields.extend(msg._unknown_fields)
+ # pylint: disable=protected-access
+ if self._unknown_field_set is None:
+ self._unknown_field_set = containers.UnknownFieldSet()
+ self._unknown_field_set._extend(msg._unknown_field_set)
+
+ cls.MergeFrom = MergeFrom
+
+
+def _AddWhichOneofMethod(message_descriptor, cls):
+ def WhichOneof(self, oneof_name):
+ """Returns the name of the currently set field inside a oneof, or None."""
+ try:
+ field = message_descriptor.oneofs_by_name[oneof_name]
+ except KeyError:
+ raise ValueError(
+ 'Protocol message has no oneof "%s" field.' % oneof_name)
+
+ nested_field = self._oneofs.get(field, None)
+ if nested_field is not None and self.HasField(nested_field.name):
+ return nested_field.name
+ else:
+ return None
+
+ cls.WhichOneof = WhichOneof
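+
+# Illustrative sketch, assuming a hypothetical message declaring
+# "oneof kind { int32 a = 1; string b = 2; }": setting one member clears the
+# other, and WhichOneof names the member that is currently set:
+#
+#   >>> msg.a = 1
+#   >>> msg.WhichOneof('kind')
+#   'a'
+#   >>> msg.b = 'x'
+#   >>> msg.WhichOneof('kind')
+#   'b'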
+
+
+def _Clear(self):
+ # Clear fields.
+ self._fields = {}
+ self._unknown_fields = ()
+ # pylint: disable=protected-access
+ if self._unknown_field_set is not None:
+ self._unknown_field_set._clear()
+ self._unknown_field_set = None
+
+ self._oneofs = {}
+ self._Modified()
+
+
+def _UnknownFields(self):
+ warnings.warn(
+      'message.UnknownFields() is deprecated. Please use '
+      'unknown_fields.UnknownFieldSet(message) in unknown_fields.py instead.'
+ )
+ if self._unknown_field_set is None: # pylint: disable=protected-access
+ # pylint: disable=protected-access
+ self._unknown_field_set = containers.UnknownFieldSet()
+ return self._unknown_field_set # pylint: disable=protected-access
+
+
+def _DiscardUnknownFields(self):
+ self._unknown_fields = []
+ self._unknown_field_set = None # pylint: disable=protected-access
+ for field, value in self.ListFields():
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ if _IsMapField(field):
+ if _IsMessageMapField(field):
+ for key in value:
+ value[key].DiscardUnknownFields()
+ elif field.label == _FieldDescriptor.LABEL_REPEATED:
+ for sub_message in value:
+ sub_message.DiscardUnknownFields()
+ else:
+ value.DiscardUnknownFields()
+
+
+def _SetListener(self, listener):
+ if listener is None:
+ self._listener = message_listener_mod.NullMessageListener()
+ else:
+ self._listener = listener
+
+
+def _AddMessageMethods(message_descriptor, cls):
+ """Adds implementations of all Message methods to cls."""
+ _AddListFieldsMethod(message_descriptor, cls)
+ _AddHasFieldMethod(message_descriptor, cls)
+ _AddClearFieldMethod(message_descriptor, cls)
+ if message_descriptor.is_extendable:
+ _AddClearExtensionMethod(cls)
+ _AddHasExtensionMethod(cls)
+ _AddEqualsMethod(message_descriptor, cls)
+ _AddStrMethod(message_descriptor, cls)
+ _AddReprMethod(message_descriptor, cls)
+ _AddUnicodeMethod(message_descriptor, cls)
+ _AddByteSizeMethod(message_descriptor, cls)
+ _AddSerializeToStringMethod(message_descriptor, cls)
+ _AddSerializePartialToStringMethod(message_descriptor, cls)
+ _AddMergeFromStringMethod(message_descriptor, cls)
+ _AddIsInitializedMethod(message_descriptor, cls)
+ _AddMergeFromMethod(cls)
+ _AddWhichOneofMethod(message_descriptor, cls)
+ # Adds methods which do not depend on cls.
+ cls.Clear = _Clear
+ cls.UnknownFields = _UnknownFields
+ cls.DiscardUnknownFields = _DiscardUnknownFields
+ cls._SetListener = _SetListener
+
+
+def _AddPrivateHelperMethods(message_descriptor, cls):
+ """Adds implementation of private helper methods to cls."""
+
+ def Modified(self):
+ """Sets the _cached_byte_size_dirty bit to true,
+ and propagates this to our listener iff this was a state change.
+ """
+
+ # Note: Some callers check _cached_byte_size_dirty before calling
+ # _Modified() as an extra optimization. So, if this method is ever
+ # changed such that it does stuff even when _cached_byte_size_dirty is
+ # already true, the callers need to be updated.
+ if not self._cached_byte_size_dirty:
+ self._cached_byte_size_dirty = True
+ self._listener_for_children.dirty = True
+ self._is_present_in_parent = True
+ self._listener.Modified()
+
+ def _UpdateOneofState(self, field):
+ """Sets field as the active field in its containing oneof.
+
+ Will also delete currently active field in the oneof, if it is different
+ from the argument. Does not mark the message as modified.
+ """
+ other_field = self._oneofs.setdefault(field.containing_oneof, field)
+ if other_field is not field:
+ del self._fields[other_field]
+ self._oneofs[field.containing_oneof] = field
+
+ cls._Modified = Modified
+ cls.SetInParent = Modified
+ cls._UpdateOneofState = _UpdateOneofState
+
+
+class _Listener(object):
+
+ """MessageListener implementation that a parent message registers with its
+ child message.
+
+ In order to support semantics like:
+
+ foo.bar.baz.moo = 23
+ assert foo.HasField('bar')
+
+ ...child objects must have back references to their parents.
+ This helper class is at the heart of this support.
+ """
+
+ def __init__(self, parent_message):
+ """Args:
+ parent_message: The message whose _Modified() method we should call when
+ we receive Modified() messages.
+ """
+ # This listener establishes a back reference from a child (contained) object
+ # to its parent (containing) object. We make this a weak reference to avoid
+ # creating cyclic garbage when the client finishes with the 'parent' object
+ # in the tree.
+ if isinstance(parent_message, weakref.ProxyType):
+ self._parent_message_weakref = parent_message
+ else:
+ self._parent_message_weakref = weakref.proxy(parent_message)
+
+ # As an optimization, we also indicate directly on the listener whether
+ # or not the parent message is dirty. This way we can avoid traversing
+ # up the tree in the common case.
+ self.dirty = False
+
+ def Modified(self):
+ if self.dirty:
+ return
+ try:
+ # Propagate the signal to our parents iff this is the first field set.
+ self._parent_message_weakref._Modified()
+ except ReferenceError:
+ # We can get here if a client has kept a reference to a child object,
+ # and is now setting a field on it, but the child's parent has been
+ # garbage-collected. This is not an error.
+ pass
+
+
+class _OneofListener(_Listener):
+ """Special listener implementation for setting composite oneof fields."""
+
+ def __init__(self, parent_message, field):
+ """Args:
+ parent_message: The message whose _Modified() method we should call when
+ we receive Modified() messages.
+ field: The descriptor of the field being set in the parent message.
+ """
+ super(_OneofListener, self).__init__(parent_message)
+ self._field = field
+
+ def Modified(self):
+ """Also updates the state of the containing oneof in the parent message."""
+ try:
+ self._parent_message_weakref._UpdateOneofState(self._field)
+ super(_OneofListener, self).Modified()
+ except ReferenceError:
+ pass
diff --git a/Lib/site-packages/google/protobuf/internal/testing_refleaks.py b/Lib/site-packages/google/protobuf/internal/testing_refleaks.py
new file mode 100644
index 0000000..ca0f0b9
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/testing_refleaks.py
@@ -0,0 +1,119 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A subclass of unittest.TestCase which checks for reference leaks.
+
+To use:
+- Use testing_refleak.BaseTestCase instead of unittest.TestCase
+- Configure and compile Python with --with-pydebug
+
+If sys.gettotalrefcount() is not available (because Python was built without
+the Py_DEBUG option), then this module is a no-op and tests will run normally.
+"""
+
+import copyreg
+import gc
+import sys
+import unittest
+
+
+class LocalTestResult(unittest.TestResult):
+ """A TestResult which forwards events to a parent object, except for Skips."""
+
+ def __init__(self, parent_result):
+ unittest.TestResult.__init__(self)
+ self.parent_result = parent_result
+
+ def addError(self, test, error):
+ self.parent_result.addError(test, error)
+
+ def addFailure(self, test, error):
+ self.parent_result.addFailure(test, error)
+
+ def addSkip(self, test, reason):
+ pass
+
+
+class ReferenceLeakCheckerMixin(object):
+ """A mixin class for TestCase, which checks reference counts."""
+
+ NB_RUNS = 3
+
+ def run(self, result=None):
+ testMethod = getattr(self, self._testMethodName)
+ expecting_failure_method = getattr(testMethod, "__unittest_expecting_failure__", False)
+ expecting_failure_class = getattr(self, "__unittest_expecting_failure__", False)
+ if expecting_failure_class or expecting_failure_method:
+ return
+
+ # python_message.py registers all Message classes to some pickle global
+ # registry, which makes the classes immortal.
+    # We save a copy of this registry, and reset it before we count
+    # references.
+ self._saved_pickle_registry = copyreg.dispatch_table.copy()
+
+ # Run the test twice, to warm up the instance attributes.
+ super(ReferenceLeakCheckerMixin, self).run(result=result)
+ super(ReferenceLeakCheckerMixin, self).run(result=result)
+
+ oldrefcount = 0
+ local_result = LocalTestResult(result)
+ num_flakes = 0
+
+ refcount_deltas = []
+ while len(refcount_deltas) < self.NB_RUNS:
+ oldrefcount = self._getRefcounts()
+ super(ReferenceLeakCheckerMixin, self).run(result=local_result)
+ newrefcount = self._getRefcounts()
+ # If the GC was able to collect some objects after the call to run() that
+ # it could not collect before the call, then the counts won't match.
+ if newrefcount < oldrefcount and num_flakes < 2:
+ # This result is (probably) a flake -- garbage collectors aren't very
+ # predictable, but a lower ending refcount is the opposite of the
+ # failure we are testing for. If the result is repeatable, then we will
+        # eventually report it, but not before trying to eliminate it.
+ num_flakes += 1
+ continue
+ num_flakes = 0
+ refcount_deltas.append(newrefcount - oldrefcount)
+ print(refcount_deltas, self)
+
+ try:
+ self.assertEqual(refcount_deltas, [0] * self.NB_RUNS)
+ except Exception: # pylint: disable=broad-except
+ result.addError(self, sys.exc_info())
+
+ def _getRefcounts(self):
+ copyreg.dispatch_table.clear()
+ copyreg.dispatch_table.update(self._saved_pickle_registry)
+ # It is sometimes necessary to gc.collect() multiple times, to ensure
+ # that all objects can be collected.
+ gc.collect()
+ gc.collect()
+ gc.collect()
+ return sys.gettotalrefcount()
+
+
+if hasattr(sys, 'gettotalrefcount'):
+
+ def TestCase(test_class):
+ new_bases = (ReferenceLeakCheckerMixin,) + test_class.__bases__
+ new_class = type(test_class)(
+ test_class.__name__, new_bases, dict(test_class.__dict__))
+ return new_class
+ SkipReferenceLeakChecker = unittest.skip
+
+else:
+  # When Py_DEBUG is not enabled, run the tests normally.
+
+ def TestCase(test_class):
+ return test_class
+
+ def SkipReferenceLeakChecker(reason):
+ del reason # Don't skip, so don't need a reason.
+ def Same(func):
+ return func
+ return Same
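+
+# Typical usage, sketched: decorate a TestCase subclass so that, under a
+# --with-pydebug build, each test is run repeatedly and its refcount deltas
+# are asserted to be zero.
+#
+#   @testing_refleaks.TestCase
+#   class MessageTest(unittest.TestCase):
+#
+#     def test_parse(self):
+#       ...
+#
+#     @testing_refleaks.SkipReferenceLeakChecker('registers a pickle hook')
+#     def test_pickle(self):
+#       ...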
diff --git a/Lib/site-packages/google/protobuf/internal/type_checkers.py b/Lib/site-packages/google/protobuf/internal/type_checkers.py
new file mode 100644
index 0000000..e152a43
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/type_checkers.py
@@ -0,0 +1,408 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Provides type checking routines.
+
+This module defines type checking utilities in the forms of dictionaries:
+
+VALUE_CHECKERS: A dictionary of field types and a value validation object.
+TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
+ function.
+TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
+ function.
+FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their
+ corresponding wire types.
+TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
+ function.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import ctypes
+import numbers
+
+from google.protobuf.internal import decoder
+from google.protobuf.internal import encoder
+from google.protobuf.internal import wire_format
+from google.protobuf import descriptor
+
+_FieldDescriptor = descriptor.FieldDescriptor
+
+
+def TruncateToFourByteFloat(original):
+ return ctypes.c_float(original).value
+
+
+def ToShortestFloat(original):
+ """Returns the shortest float that has same value in wire."""
+ # All 4 byte floats have between 6 and 9 significant digits, so we
+ # start with 6 as the lower bound.
+  # It has to be iterative because using '.9g' directly cannot remove the
+  # noise for most values. For example, if a float field is set to 0.9,
+  # formatting with '.9g' prints 0.899999976.
+ precision = 6
+ rounded = float('{0:.{1}g}'.format(original, precision))
+ while TruncateToFourByteFloat(rounded) != original:
+ precision += 1
+ rounded = float('{0:.{1}g}'.format(original, precision))
+ return rounded
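+
+# Illustrative sketch: a float32-representable value round-trips to its
+# shortest decimal form.
+#
+#   >>> noisy = TruncateToFourByteFloat(0.9)
+#   >>> noisy
+#   0.8999999761581421
+#   >>> ToShortestFloat(noisy)
+#   0.9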
+
+
+def GetTypeChecker(field):
+ """Returns a type checker for a message field of the specified types.
+
+ Args:
+ field: FieldDescriptor object for this field.
+
+ Returns:
+ An instance of TypeChecker which can be used to verify the types
+ of values assigned to a field of the specified type.
+ """
+ if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
+ field.type == _FieldDescriptor.TYPE_STRING):
+ return UnicodeValueChecker()
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
+ if field.enum_type.is_closed:
+ return EnumValueChecker(field.enum_type)
+ else:
+ # When open enums are supported, any int32 can be assigned.
+ return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
+ return _VALUE_CHECKERS[field.cpp_type]
+
+
+# None of the typecheckers below make any attempt to guard against people
+# subclassing builtin types and doing weird things. We're not trying to
+# protect against malicious clients here, just people accidentally shooting
+# themselves in the foot in obvious ways.
+class TypeChecker(object):
+
+ """Type checker used to catch type errors as early as possible
+ when the client is setting scalar fields in protocol messages.
+ """
+
+ def __init__(self, *acceptable_types):
+ self._acceptable_types = acceptable_types
+
+ def CheckValue(self, proposed_value):
+ """Type check the provided value and return it.
+
+ The returned value might have been normalized to another type.
+ """
+ if not isinstance(proposed_value, self._acceptable_types):
+ message = ('%.1024r has type %s, but expected one of: %s' %
+ (proposed_value, type(proposed_value), self._acceptable_types))
+ raise TypeError(message)
+ return proposed_value
+
+
+class TypeCheckerWithDefault(TypeChecker):
+
+ def __init__(self, default_value, *acceptable_types):
+ TypeChecker.__init__(self, *acceptable_types)
+ self._default_value = default_value
+
+ def DefaultValue(self):
+ return self._default_value
+
+
+class BoolValueChecker(object):
+ """Type checker used for bool fields."""
+
+ def CheckValue(self, proposed_value):
+ if not hasattr(proposed_value, '__index__') or (
+ type(proposed_value).__module__ == 'numpy' and
+ type(proposed_value).__name__ == 'ndarray'):
+ message = ('%.1024r has type %s, but expected one of: %s' %
+ (proposed_value, type(proposed_value), (bool, int)))
+ raise TypeError(message)
+ return bool(proposed_value)
+
+ def DefaultValue(self):
+ return False
+
+
+# IntValueChecker and its subclasses perform integer type-checks
+# and bounds-checks.
+class IntValueChecker(object):
+
+ """Checker used for integer fields. Performs type-check and range check."""
+
+ def CheckValue(self, proposed_value):
+ if not hasattr(proposed_value, '__index__') or (
+ type(proposed_value).__module__ == 'numpy' and
+ type(proposed_value).__name__ == 'ndarray'):
+ message = ('%.1024r has type %s, but expected one of: %s' %
+ (proposed_value, type(proposed_value), (int,)))
+ raise TypeError(message)
+
+ if not self._MIN <= int(proposed_value) <= self._MAX:
+ raise ValueError('Value out of range: %d' % proposed_value)
+ # We force all values to int to make alternate implementations where the
+ # distinction is more significant (e.g. the C++ implementation) simpler.
+ proposed_value = int(proposed_value)
+ return proposed_value
+
+ def DefaultValue(self):
+ return 0
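+
+# Illustrative sketch, using the Int32ValueChecker subclass defined below:
+# anything exposing __index__ is accepted and normalized to int; values
+# outside the 32-bit range raise ValueError.
+#
+#   >>> Int32ValueChecker().CheckValue(True)
+#   1
+#   >>> Int32ValueChecker().CheckValue(1 << 31)  # raises ValueError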
+
+
+class EnumValueChecker(object):
+
+ """Checker used for enum fields. Performs type-check and range check."""
+
+ def __init__(self, enum_type):
+ self._enum_type = enum_type
+
+ def CheckValue(self, proposed_value):
+ if not isinstance(proposed_value, numbers.Integral):
+ message = ('%.1024r has type %s, but expected one of: %s' %
+ (proposed_value, type(proposed_value), (int,)))
+ raise TypeError(message)
+ if int(proposed_value) not in self._enum_type.values_by_number:
+ raise ValueError('Unknown enum value: %d' % proposed_value)
+ return proposed_value
+
+ def DefaultValue(self):
+ return self._enum_type.values[0].number
+
+
+class UnicodeValueChecker(object):
+
+ """Checker used for string fields.
+
+ Always returns a unicode value, even if the input is of type str.
+ """
+
+ def CheckValue(self, proposed_value):
+ if not isinstance(proposed_value, (bytes, str)):
+ message = ('%.1024r has type %s, but expected one of: %s' %
+ (proposed_value, type(proposed_value), (bytes, str)))
+ raise TypeError(message)
+
+ # If the value is of type 'bytes' make sure that it is valid UTF-8 data.
+ if isinstance(proposed_value, bytes):
+ try:
+ proposed_value = proposed_value.decode('utf-8')
+ except UnicodeDecodeError:
+ raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 '
+ 'encoding. Non-UTF-8 strings must be converted to '
+ 'unicode objects before being added.' %
+ (proposed_value))
+ else:
+ try:
+ proposed_value.encode('utf8')
+ except UnicodeEncodeError:
+ raise ValueError('%.1024r isn\'t a valid unicode string and '
+ 'can\'t be encoded in UTF-8.'%
+ (proposed_value))
+
+ return proposed_value
+
+ def DefaultValue(self):
+ return u""
+
+
+class Int32ValueChecker(IntValueChecker):
+  # Bounds of a signed 32-bit integer.
+ _MIN = -2147483648
+ _MAX = 2147483647
+
+
+class Uint32ValueChecker(IntValueChecker):
+ _MIN = 0
+ _MAX = (1 << 32) - 1
+
+
+class Int64ValueChecker(IntValueChecker):
+ _MIN = -(1 << 63)
+ _MAX = (1 << 63) - 1
+
+
+class Uint64ValueChecker(IntValueChecker):
+ _MIN = 0
+ _MAX = (1 << 64) - 1
+
+
+# The max 4 bytes float is about 3.4028234663852886e+38
+_FLOAT_MAX = float.fromhex('0x1.fffffep+127')
+_FLOAT_MIN = -_FLOAT_MAX
+_INF = float('inf')
+_NEG_INF = float('-inf')
+
+
+class DoubleValueChecker(object):
+ """Checker used for double fields.
+
+ Performs type-check and range check.
+ """
+
+ def CheckValue(self, proposed_value):
+ """Check and convert proposed_value to float."""
+ if (not hasattr(proposed_value, '__float__') and
+ not hasattr(proposed_value, '__index__')) or (
+ type(proposed_value).__module__ == 'numpy' and
+ type(proposed_value).__name__ == 'ndarray'):
+ message = ('%.1024r has type %s, but expected one of: int, float' %
+ (proposed_value, type(proposed_value)))
+ raise TypeError(message)
+ return float(proposed_value)
+
+ def DefaultValue(self):
+ return 0.0
+
+
+class FloatValueChecker(DoubleValueChecker):
+ """Checker used for float fields.
+
+ Performs type-check and range check.
+
+ Values exceeding a 32-bit float will be converted to inf/-inf.
+ """
+
+ def CheckValue(self, proposed_value):
+ """Check and convert proposed_value to float."""
+ converted_value = super().CheckValue(proposed_value)
+ # This inf rounding matches the C++ proto SafeDoubleToFloat logic.
+ if converted_value > _FLOAT_MAX:
+ return _INF
+ if converted_value < _FLOAT_MIN:
+ return _NEG_INF
+
+ return TruncateToFourByteFloat(converted_value)
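+
+# Illustrative sketch: values beyond the float32 range become +/-inf, and
+# in-range values are truncated to float32 precision.
+#
+#   >>> FloatValueChecker().CheckValue(3.5e38)
+#   inf
+#   >>> FloatValueChecker().CheckValue(0.1)
+#   0.10000000149011612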
+
+# Type-checkers for all scalar CPPTYPEs.
+_VALUE_CHECKERS = {
+ _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
+ _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
+ _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
+ _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
+ _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(),
+ _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(),
+ _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(),
+ _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes),
+}
+
+
+# Map from field type to a function F, such that F(field_num, value)
+# gives the total byte size for a value of the given type. This
+# byte size includes tag information and any other additional space
+# associated with serializing "value".
+TYPE_TO_BYTE_SIZE_FN = {
+ _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
+ _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
+ _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
+ _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
+ _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
+ _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
+ _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
+ _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
+ _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
+ _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
+ _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
+ _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
+ _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
+ _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
+ _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
+ _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
+ _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
+ _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
+ }
+
+
+# Maps from field types to encoder constructors.
+TYPE_TO_ENCODER = {
+ _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder,
+ _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder,
+ _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder,
+ _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder,
+ _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder,
+ _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder,
+ _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder,
+ _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder,
+ _FieldDescriptor.TYPE_STRING: encoder.StringEncoder,
+ _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder,
+ _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder,
+ _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder,
+ _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder,
+ _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder,
+ _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder,
+ _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder,
+ _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder,
+ _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder,
+ }
+
+
+# Maps from field types to sizer constructors.
+TYPE_TO_SIZER = {
+ _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer,
+ _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer,
+ _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer,
+ _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer,
+ _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer,
+ _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer,
+ _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer,
+ _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer,
+ _FieldDescriptor.TYPE_STRING: encoder.StringSizer,
+ _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer,
+ _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer,
+ _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer,
+ _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer,
+ _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer,
+ _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer,
+ _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer,
+ _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer,
+ _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer,
+ }
+
+
+# Maps from field type to a decoder constructor.
+TYPE_TO_DECODER = {
+ _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder,
+ _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder,
+ _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder,
+ _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder,
+ _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder,
+ _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder,
+ _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder,
+ _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder,
+ _FieldDescriptor.TYPE_STRING: decoder.StringDecoder,
+ _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder,
+ _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder,
+ _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder,
+ _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder,
+ _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder,
+ _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder,
+ _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder,
+ _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder,
+ _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder,
+ }
+
+# Maps from field type to expected wiretype.
+FIELD_TYPE_TO_WIRE_TYPE = {
+ _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
+ _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
+ _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
+ _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
+ _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_STRING:
+ wire_format.WIRETYPE_LENGTH_DELIMITED,
+ _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
+ _FieldDescriptor.TYPE_MESSAGE:
+ wire_format.WIRETYPE_LENGTH_DELIMITED,
+ _FieldDescriptor.TYPE_BYTES:
+ wire_format.WIRETYPE_LENGTH_DELIMITED,
+ _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
+ _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
+ _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
+ }
diff --git a/Lib/site-packages/google/protobuf/internal/well_known_types.py b/Lib/site-packages/google/protobuf/internal/well_known_types.py
new file mode 100644
index 0000000..5727bc9
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/well_known_types.py
@@ -0,0 +1,567 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Contains well known classes.
+
+This file defines well-known classes that need extra maintenance, including:
+ - Any
+ - Duration
+ - FieldMask
+ - Struct
+ - Timestamp
+"""
+
+__author__ = 'jieluo@google.com (Jie Luo)'
+
+import calendar
+import collections.abc
+import datetime
+
+from google.protobuf.internal import field_mask
+
+FieldMask = field_mask.FieldMask
+
+_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
+_NANOS_PER_SECOND = 1000000000
+_NANOS_PER_MILLISECOND = 1000000
+_NANOS_PER_MICROSECOND = 1000
+_MILLIS_PER_SECOND = 1000
+_MICROS_PER_SECOND = 1000000
+_SECONDS_PER_DAY = 24 * 3600
+_DURATION_SECONDS_MAX = 315576000000
+
+_EPOCH_DATETIME_NAIVE = datetime.datetime(1970, 1, 1, tzinfo=None)
+_EPOCH_DATETIME_AWARE = _EPOCH_DATETIME_NAIVE.replace(
+ tzinfo=datetime.timezone.utc
+)
+
+
+class Any(object):
+ """Class for Any Message type."""
+
+ __slots__ = ()
+
+ def Pack(self, msg, type_url_prefix='type.googleapis.com/',
+ deterministic=None):
+ """Packs the specified message into current Any message."""
+ if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/':
+ self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
+ else:
+ self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
+ self.value = msg.SerializeToString(deterministic=deterministic)
+
+ def Unpack(self, msg):
+ """Unpacks the current Any message into specified message."""
+ descriptor = msg.DESCRIPTOR
+ if not self.Is(descriptor):
+ return False
+ msg.ParseFromString(self.value)
+ return True
+
+ def TypeName(self):
+ """Returns the protobuf type name of the inner message."""
+ # Only last part is to be used: b/25630112
+ return self.type_url.split('/')[-1]
+
+ def Is(self, descriptor):
+ """Checks if this Any represents the given protobuf type."""
+ return '/' in self.type_url and self.TypeName() == descriptor.full_name
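+
+# Illustrative sketch; any_pb2 is the generated module these methods are
+# mixed into, and foo_pb2.Foo is a hypothetical message class:
+#
+#   >>> from google.protobuf import any_pb2
+#   >>> any_msg = any_pb2.Any()
+#   >>> any_msg.Pack(foo_pb2.Foo(id=1))
+#   >>> any_msg.TypeName()  # the part of type_url after the last '/'
+#   'mypackage.Foo'
+#   >>> any_msg.Unpack(foo_pb2.Foo())
+#   True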
+
+
+class Timestamp(object):
+ """Class for Timestamp message type."""
+
+ __slots__ = ()
+
+ def ToJsonString(self):
+ """Converts Timestamp to RFC 3339 date string format.
+
+ Returns:
+ A string converted from timestamp. The string is always Z-normalized
+ and uses 3, 6 or 9 fractional digits as required to represent the
+ exact time. Example of the return format: '1972-01-01T10:00:20.021Z'
+ """
+ nanos = self.nanos % _NANOS_PER_SECOND
+ total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND
+ seconds = total_sec % _SECONDS_PER_DAY
+ days = (total_sec - seconds) // _SECONDS_PER_DAY
+ dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds)
+
+ result = dt.isoformat()
+ if (nanos % 1e9) == 0:
+ # If there are 0 fractional digits, the fractional
+ # point '.' should be omitted when serializing.
+ return result + 'Z'
+ if (nanos % 1e6) == 0:
+ # Serialize 3 fractional digits.
+ return result + '.%03dZ' % (nanos / 1e6)
+ if (nanos % 1e3) == 0:
+ # Serialize 6 fractional digits.
+ return result + '.%06dZ' % (nanos / 1e3)
+ # Serialize 9 fractional digits.
+ return result + '.%09dZ' % nanos
+
+ def FromJsonString(self, value):
+ """Parse a RFC 3339 date string format to Timestamp.
+
+ Args:
+ value: A date string. Any fractional digits (or none) and any offset are
+ accepted as long as they fit into nano-seconds precision.
+ Example of accepted format: '1972-01-01T10:00:20.021-05:00'
+
+ Raises:
+ ValueError: On parsing problems.
+ """
+ if not isinstance(value, str):
+ raise ValueError('Timestamp JSON value not a string: {!r}'.format(value))
+ timezone_offset = value.find('Z')
+ if timezone_offset == -1:
+ timezone_offset = value.find('+')
+ if timezone_offset == -1:
+ timezone_offset = value.rfind('-')
+ if timezone_offset == -1:
+ raise ValueError(
+ 'Failed to parse timestamp: missing valid timezone offset.')
+ time_value = value[0:timezone_offset]
+ # Parse datetime and nanos.
+ point_position = time_value.find('.')
+ if point_position == -1:
+ second_value = time_value
+ nano_value = ''
+ else:
+ second_value = time_value[:point_position]
+ nano_value = time_value[point_position + 1:]
+ if 't' in second_value:
+ raise ValueError(
+ 'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', '
+ 'lowercase \'t\' is not accepted'.format(second_value))
+ date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT)
+ td = date_object - datetime.datetime(1970, 1, 1)
+ seconds = td.seconds + td.days * _SECONDS_PER_DAY
+ if len(nano_value) > 9:
+ raise ValueError(
+ 'Failed to parse Timestamp: nanos {0} more than '
+ '9 fractional digits.'.format(nano_value))
+ if nano_value:
+ nanos = round(float('0.' + nano_value) * 1e9)
+ else:
+ nanos = 0
+ # Parse timezone offsets.
+ if value[timezone_offset] == 'Z':
+ if len(value) != timezone_offset + 1:
+ raise ValueError('Failed to parse timestamp: invalid trailing'
+ ' data {0}.'.format(value))
+ else:
+ timezone = value[timezone_offset:]
+ pos = timezone.find(':')
+ if pos == -1:
+ raise ValueError(
+ 'Invalid timezone offset value: {0}.'.format(timezone))
+ if timezone[0] == '+':
+ seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
+ else:
+ seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
+ # Set seconds and nanos
+ self.seconds = int(seconds)
+ self.nanos = int(nanos)
+
+ def GetCurrentTime(self):
+ """Get the current UTC into Timestamp."""
+ self.FromDatetime(datetime.datetime.utcnow())
+
+ def ToNanoseconds(self):
+ """Converts Timestamp to nanoseconds since epoch."""
+ return self.seconds * _NANOS_PER_SECOND + self.nanos
+
+ def ToMicroseconds(self):
+ """Converts Timestamp to microseconds since epoch."""
+ return (self.seconds * _MICROS_PER_SECOND +
+ self.nanos // _NANOS_PER_MICROSECOND)
+
+ def ToMilliseconds(self):
+ """Converts Timestamp to milliseconds since epoch."""
+ return (self.seconds * _MILLIS_PER_SECOND +
+ self.nanos // _NANOS_PER_MILLISECOND)
+
+ def ToSeconds(self):
+ """Converts Timestamp to seconds since epoch."""
+ return self.seconds
+
+ def FromNanoseconds(self, nanos):
+ """Converts nanoseconds since epoch to Timestamp."""
+ self.seconds = nanos // _NANOS_PER_SECOND
+ self.nanos = nanos % _NANOS_PER_SECOND
+
+ def FromMicroseconds(self, micros):
+ """Converts microseconds since epoch to Timestamp."""
+ self.seconds = micros // _MICROS_PER_SECOND
+ self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND
+
+ def FromMilliseconds(self, millis):
+ """Converts milliseconds since epoch to Timestamp."""
+ self.seconds = millis // _MILLIS_PER_SECOND
+ self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND
+
+ def FromSeconds(self, seconds):
+ """Converts seconds since epoch to Timestamp."""
+ self.seconds = seconds
+ self.nanos = 0
+
+ def ToDatetime(self, tzinfo=None):
+ """Converts Timestamp to a datetime.
+
+ Args:
+ tzinfo: A datetime.tzinfo subclass; defaults to None.
+
+ Returns:
+ If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone
+ information, i.e. not aware that it's UTC).
+
+ Otherwise, returns a timezone-aware datetime in the input timezone.
+ """
+ # Using datetime.fromtimestamp for this would avoid constructing an extra
+    # timedelta object and possibly an extra datetime. Unfortunately, that has
+ # the disadvantage of not handling the full precision (on all platforms, see
+ # https://github.com/python/cpython/issues/109849) or full range (on some
+ # platforms, see https://github.com/python/cpython/issues/110042) of
+ # datetime.
+ delta = datetime.timedelta(
+ seconds=self.seconds,
+ microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND),
+ )
+ if tzinfo is None:
+ return _EPOCH_DATETIME_NAIVE + delta
+ else:
+ # Note the tz conversion has to come after the timedelta arithmetic.
+ return (_EPOCH_DATETIME_AWARE + delta).astimezone(tzinfo)
+
+ def FromDatetime(self, dt):
+ """Converts datetime to Timestamp.
+
+ Args:
+ dt: A datetime. If it's timezone-naive, it's assumed to be in UTC.
+ """
+ # Using this guide: http://wiki.python.org/moin/WorkingWithTime
+ # And this conversion guide: http://docs.python.org/library/time.html
+
+    # Turn the date parameter into a tuple (struct_time) that can then be
+    # converted into a count of seconds. During that conversion the source
+    # date is treated as UTC, so the correct transformation is
+    # calendar.timegm().
+ self.seconds = calendar.timegm(dt.utctimetuple())
+ self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND
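+
+# Illustrative sketch; timestamp_pb2.Timestamp is the generated class these
+# methods are mixed into:
+#
+#   >>> from google.protobuf import timestamp_pb2
+#   >>> ts = timestamp_pb2.Timestamp(seconds=63108020, nanos=21000000)
+#   >>> ts.ToJsonString()
+#   '1972-01-01T10:00:20.021Z'
+#   >>> ts.ToMilliseconds()
+#   63108020021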
+
+
+class Duration(object):
+ """Class for Duration message type."""
+
+ __slots__ = ()
+
+ def ToJsonString(self):
+ """Converts Duration to string format.
+
+ Returns:
+      A string converted from self. The string format will contain
+ 3, 6, or 9 fractional digits depending on the precision required to
+ represent the exact Duration value. For example: "1s", "1.010s",
+ "1.000000100s", "-3.100s"
+ """
+ _CheckDurationValid(self.seconds, self.nanos)
+ if self.seconds < 0 or self.nanos < 0:
+ result = '-'
+ seconds = - self.seconds + int((0 - self.nanos) // 1e9)
+ nanos = (0 - self.nanos) % 1e9
+ else:
+ result = ''
+ seconds = self.seconds + int(self.nanos // 1e9)
+ nanos = self.nanos % 1e9
+ result += '%d' % seconds
+ if (nanos % 1e9) == 0:
+ # If there are 0 fractional digits, the fractional
+ # point '.' should be omitted when serializing.
+ return result + 's'
+ if (nanos % 1e6) == 0:
+ # Serialize 3 fractional digits.
+ return result + '.%03ds' % (nanos / 1e6)
+ if (nanos % 1e3) == 0:
+ # Serialize 6 fractional digits.
+ return result + '.%06ds' % (nanos / 1e3)
+ # Serialize 9 fractional digits.
+ return result + '.%09ds' % nanos
+
+ def FromJsonString(self, value):
+ """Converts a string to Duration.
+
+ Args:
+ value: A string to be converted. The string must end with 's'. Any
+ fractional digits (or none) are accepted as long as they fit into
+      precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s".
+
+ Raises:
+ ValueError: On parsing problems.
+ """
+ if not isinstance(value, str):
+ raise ValueError('Duration JSON value not a string: {!r}'.format(value))
+ if len(value) < 1 or value[-1] != 's':
+ raise ValueError(
+ 'Duration must end with letter "s": {0}.'.format(value))
+ try:
+ pos = value.find('.')
+ if pos == -1:
+ seconds = int(value[:-1])
+ nanos = 0
+ else:
+ seconds = int(value[:pos])
+      if value[0] == '-':
+        nanos = int(round(float('-0{0}'.format(value[pos:-1])) * 1e9))
+      else:
+        nanos = int(round(float('0{0}'.format(value[pos:-1])) * 1e9))
+ _CheckDurationValid(seconds, nanos)
+ self.seconds = seconds
+ self.nanos = nanos
+ except ValueError as e:
+ raise ValueError(
+ 'Couldn\'t parse duration: {0} : {1}.'.format(value, e))
+
+ def ToNanoseconds(self):
+ """Converts a Duration to nanoseconds."""
+ return self.seconds * _NANOS_PER_SECOND + self.nanos
+
+ def ToMicroseconds(self):
+ """Converts a Duration to microseconds."""
+ micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)
+ return self.seconds * _MICROS_PER_SECOND + micros
+
+ def ToMilliseconds(self):
+ """Converts a Duration to milliseconds."""
+ millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND)
+ return self.seconds * _MILLIS_PER_SECOND + millis
+
+ def ToSeconds(self):
+ """Converts a Duration to seconds."""
+ return self.seconds
+
+ def FromNanoseconds(self, nanos):
+ """Converts nanoseconds to Duration."""
+ self._NormalizeDuration(nanos // _NANOS_PER_SECOND,
+ nanos % _NANOS_PER_SECOND)
+
+ def FromMicroseconds(self, micros):
+ """Converts microseconds to Duration."""
+ self._NormalizeDuration(
+ micros // _MICROS_PER_SECOND,
+ (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND)
+
+ def FromMilliseconds(self, millis):
+ """Converts milliseconds to Duration."""
+ self._NormalizeDuration(
+ millis // _MILLIS_PER_SECOND,
+ (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND)
+
+ def FromSeconds(self, seconds):
+ """Converts seconds to Duration."""
+ self.seconds = seconds
+ self.nanos = 0
+
+ def ToTimedelta(self):
+ """Converts Duration to timedelta."""
+ return datetime.timedelta(
+ seconds=self.seconds, microseconds=_RoundTowardZero(
+ self.nanos, _NANOS_PER_MICROSECOND))
+
+ def FromTimedelta(self, td):
+ """Converts timedelta to Duration."""
+ self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY,
+ td.microseconds * _NANOS_PER_MICROSECOND)
+
+ def _NormalizeDuration(self, seconds, nanos):
+ """Set Duration by seconds and nanos."""
+ # Force nanos to be negative if the duration is negative.
+ if seconds < 0 and nanos > 0:
+ seconds += 1
+ nanos -= _NANOS_PER_SECOND
+ self.seconds = seconds
+ self.nanos = nanos
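+
+# Illustrative sketch; duration_pb2.Duration is the generated class these
+# methods are mixed into:
+#
+#   >>> from google.protobuf import duration_pb2
+#   >>> d = duration_pb2.Duration()
+#   >>> d.FromJsonString('-3.100s')
+#   >>> (d.seconds, d.nanos)
+#   (-3, -100000000)
+#   >>> d.ToJsonString()
+#   '-3.100s'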
+
+
+def _CheckDurationValid(seconds, nanos):
+ if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX:
+ raise ValueError(
+ 'Duration is not valid: Seconds {0} must be in range '
+ '[-315576000000, 315576000000].'.format(seconds))
+ if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND:
+ raise ValueError(
+ 'Duration is not valid: Nanos {0} must be in range '
+ '[-999999999, 999999999].'.format(nanos))
+ if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0):
+ raise ValueError(
+ 'Duration is not valid: Sign mismatch.')
+
+
+def _RoundTowardZero(value, divider):
+ """Truncates the remainder part after division."""
+ # For some languages, the sign of the remainder is implementation
+ # dependent if any of the operands is negative. Here we enforce
+ # "rounded toward zero" semantics. For example, for (-5) / 2 an
+ # implementation may give -3 as the result with the remainder being
+ # 1. This function ensures we always return -2 (closer to zero).
+ result = value // divider
+ remainder = value % divider
+ if result < 0 and remainder > 0:
+ return result + 1
+ else:
+ return result
+
+
+def _SetStructValue(struct_value, value):
+ if value is None:
+ struct_value.null_value = 0
+ elif isinstance(value, bool):
+ # Note: this check must come before the number check because in Python
+ # True and False are also considered numbers.
+ struct_value.bool_value = value
+ elif isinstance(value, str):
+ struct_value.string_value = value
+ elif isinstance(value, (int, float)):
+ struct_value.number_value = value
+ elif isinstance(value, (dict, Struct)):
+ struct_value.struct_value.Clear()
+ struct_value.struct_value.update(value)
+ elif isinstance(value, (list, tuple, ListValue)):
+ struct_value.list_value.Clear()
+ struct_value.list_value.extend(value)
+ else:
+ raise ValueError('Unexpected type')
+
+
+def _GetStructValue(struct_value):
+ which = struct_value.WhichOneof('kind')
+ if which == 'struct_value':
+ return struct_value.struct_value
+ elif which == 'null_value':
+ return None
+ elif which == 'number_value':
+ return struct_value.number_value
+ elif which == 'string_value':
+ return struct_value.string_value
+ elif which == 'bool_value':
+ return struct_value.bool_value
+ elif which == 'list_value':
+ return struct_value.list_value
+ elif which is None:
+ raise ValueError('Value not set')
+
+
+class Struct(object):
+ """Class for Struct message type."""
+
+ __slots__ = ()
+
+ def __getitem__(self, key):
+ return _GetStructValue(self.fields[key])
+
+ def __contains__(self, item):
+ return item in self.fields
+
+ def __setitem__(self, key, value):
+ _SetStructValue(self.fields[key], value)
+
+ def __delitem__(self, key):
+ del self.fields[key]
+
+ def __len__(self):
+ return len(self.fields)
+
+ def __iter__(self):
+ return iter(self.fields)
+
+ def keys(self): # pylint: disable=invalid-name
+ return self.fields.keys()
+
+ def values(self): # pylint: disable=invalid-name
+ return [self[key] for key in self]
+
+ def items(self): # pylint: disable=invalid-name
+ return [(key, self[key]) for key in self]
+
+ def get_or_create_list(self, key):
+ """Returns a list for this key, creating if it didn't exist already."""
+ if not self.fields[key].HasField('list_value'):
+ # Clear will mark list_value modified which will indeed create a list.
+ self.fields[key].list_value.Clear()
+ return self.fields[key].list_value
+
+ def get_or_create_struct(self, key):
+ """Returns a struct for this key, creating if it didn't exist already."""
+ if not self.fields[key].HasField('struct_value'):
+ # Clear will mark struct_value modified which will indeed create a struct.
+ self.fields[key].struct_value.Clear()
+ return self.fields[key].struct_value
+
+ def update(self, dictionary): # pylint: disable=invalid-name
+ for key, value in dictionary.items():
+ _SetStructValue(self.fields[key], value)
+
+collections.abc.MutableMapping.register(Struct)
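+
+# Illustrative sketch; struct_pb2.Struct is the generated class these methods
+# are mixed into. Note that all JSON numbers are stored as doubles:
+#
+#   >>> from google.protobuf import struct_pb2
+#   >>> s = struct_pb2.Struct()
+#   >>> s.update({'name': 'proto', 'count': 3})
+#   >>> s['name'], s['count']
+#   ('proto', 3.0)
+#   >>> s.get_or_create_list('tags').extend(['a', 'b'])
+#   >>> list(s['tags'])
+#   ['a', 'b']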
+
+
+class ListValue(object):
+ """Class for ListValue message type."""
+
+ __slots__ = ()
+
+ def __len__(self):
+ return len(self.values)
+
+ def append(self, value):
+ _SetStructValue(self.values.add(), value)
+
+ def extend(self, elem_seq):
+ for value in elem_seq:
+ self.append(value)
+
+ def __getitem__(self, index):
+ """Retrieves item by the specified index."""
+ return _GetStructValue(self.values.__getitem__(index))
+
+ def __setitem__(self, index, value):
+ _SetStructValue(self.values.__getitem__(index), value)
+
+ def __delitem__(self, key):
+ del self.values[key]
+
+ def items(self):
+ for i in range(len(self)):
+ yield self[i]
+
+ def add_struct(self):
+ """Appends and returns a struct value as the next value in the list."""
+ struct_value = self.values.add().struct_value
+ # Clear will mark struct_value modified which will indeed create a struct.
+ struct_value.Clear()
+ return struct_value
+
+ def add_list(self):
+ """Appends and returns a list value as the next value in the list."""
+ list_value = self.values.add().list_value
+ # Clear will mark list_value modified which will indeed create a list.
+ list_value.Clear()
+ return list_value
+
+collections.abc.MutableSequence.register(ListValue)
+
+
+# LINT.IfChange(wktbases)
+WKTBASES = {
+ 'google.protobuf.Any': Any,
+ 'google.protobuf.Duration': Duration,
+ 'google.protobuf.FieldMask': FieldMask,
+ 'google.protobuf.ListValue': ListValue,
+ 'google.protobuf.Struct': Struct,
+ 'google.protobuf.Timestamp': Timestamp,
+}
+# LINT.ThenChange(//depot/google.protobuf/compiler/python/pyi_generator.cc:wktbases)
diff --git a/Lib/site-packages/google/protobuf/internal/wire_format.py b/Lib/site-packages/google/protobuf/internal/wire_format.py
new file mode 100644
index 0000000..6237dab
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/internal/wire_format.py
@@ -0,0 +1,245 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Constants and static functions to support protocol buffer wire format."""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import struct
+from google.protobuf import descriptor
+from google.protobuf import message
+
+
+TAG_TYPE_BITS = 3 # Number of bits used to hold type info in a proto tag.
+TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7
+
+# These numbers identify the wire type of a protocol buffer value.
+# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
+# tag-and-type to store one of these WIRETYPE_* constants.
+# These values must match WireType enum in //google/protobuf/wire_format.h.
+WIRETYPE_VARINT = 0
+WIRETYPE_FIXED64 = 1
+WIRETYPE_LENGTH_DELIMITED = 2
+WIRETYPE_START_GROUP = 3
+WIRETYPE_END_GROUP = 4
+WIRETYPE_FIXED32 = 5
+_WIRETYPE_MAX = 5
+
+
+# Bounds for various integer types.
+INT32_MAX = int((1 << 31) - 1)
+INT32_MIN = int(-(1 << 31))
+UINT32_MAX = (1 << 32) - 1
+
+INT64_MAX = (1 << 63) - 1
+INT64_MIN = -(1 << 63)
+UINT64_MAX = (1 << 64) - 1
+
+# "struct" format strings that will encode/decode the specified formats.
+FORMAT_UINT32_LITTLE_ENDIAN = '<I'
+FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
+FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
+FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'
+
+
+# We'll have to provide alternate implementations of AppendLittleEndian*() on
+# any architectures where these checks fail.
+if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
+  raise AssertionError('Format "I" is not a 32-bit number.')
+if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
+  raise AssertionError('Format "Q" is not a 64-bit number.')
+
+
+def PackTag(field_number, wire_type):
+  """Returns an unsigned 32-bit integer that encodes the field number and
+  wire type information in standard protocol message wire format.
+
+  Args:
+    field_number: Expected to be an integer in the range [1, 1 << 29)
+    wire_type: One of the WIRETYPE_* constants.
+  """
+  if not 0 <= wire_type <= _WIRETYPE_MAX:
+    raise message.EncodeError('Unknown wire type: %d' % wire_type)
+  return (field_number << TAG_TYPE_BITS) | wire_type
+
+
+def UnpackTag(tag):
+  """The inverse of PackTag(). Given an unsigned 32-bit number,
+  returns a (field_number, wire_type) tuple.
+  """
+  return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
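+
+# Illustrative sketch: the tag for field 5 with varint wire type is
+# (5 << 3) | 0:
+#
+#   >>> PackTag(5, WIRETYPE_VARINT)
+#   40
+#   >>> UnpackTag(40)
+#   (5, 0)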
+
+
+def ZigZagEncode(value):
+ """ZigZag Transform: Encodes signed integers so that they can be
+ effectively used with varint encoding. See wire_format.h for
+ more details.
+ """
+ if value >= 0:
+ return value << 1
+ return (value << 1) ^ (~0)
+
+
+def ZigZagDecode(value):
+ """Inverse of ZigZagEncode()."""
+ if not value & 0x1:
+ return value >> 1
+ return (value >> 1) ^ (~0)
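+
+# Illustrative sketch: ZigZag interleaves signed values so that small
+# magnitudes map to small unsigned varints.
+#
+#   >>> [ZigZagEncode(v) for v in (0, -1, 1, -2, 2)]
+#   [0, 1, 2, 3, 4]
+#   >>> ZigZagDecode(3)
+#   -2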
+
+
+
+# The *ByteSize() functions below return the number of bytes required to
+# serialize "field number + type" information and then serialize the value.
+
+
+def Int32ByteSize(field_number, int32):
+ return Int64ByteSize(field_number, int32)
+
+
+def Int32ByteSizeNoTag(int32):
+ return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
+
+
+def Int64ByteSize(field_number, int64):
+ # Have to convert to uint before calling UInt64ByteSize().
+ return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)
+
+
+def UInt32ByteSize(field_number, uint32):
+ return UInt64ByteSize(field_number, uint32)
+
+
+def UInt64ByteSize(field_number, uint64):
+ return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)
+
+
+def SInt32ByteSize(field_number, int32):
+ return UInt32ByteSize(field_number, ZigZagEncode(int32))
+
+
+def SInt64ByteSize(field_number, int64):
+ return UInt64ByteSize(field_number, ZigZagEncode(int64))
+
+
+def Fixed32ByteSize(field_number, fixed32):
+ return TagByteSize(field_number) + 4
+
+
+def Fixed64ByteSize(field_number, fixed64):
+ return TagByteSize(field_number) + 8
+
+
+def SFixed32ByteSize(field_number, sfixed32):
+ return TagByteSize(field_number) + 4
+
+
+def SFixed64ByteSize(field_number, sfixed64):
+ return TagByteSize(field_number) + 8
+
+
+def FloatByteSize(field_number, flt):
+ return TagByteSize(field_number) + 4
+
+
+def DoubleByteSize(field_number, double):
+ return TagByteSize(field_number) + 8
+
+
+def BoolByteSize(field_number, b):
+ return TagByteSize(field_number) + 1
+
+
+def EnumByteSize(field_number, enum):
+ return UInt32ByteSize(field_number, enum)
+
+
+def StringByteSize(field_number, string):
+ return BytesByteSize(field_number, string.encode('utf-8'))
+
+
+def BytesByteSize(field_number, b):
+ return (TagByteSize(field_number)
+ + _VarUInt64ByteSizeNoTag(len(b))
+ + len(b))
+
+
+def GroupByteSize(field_number, message):
+ return (2 * TagByteSize(field_number) # START and END group.
+ + message.ByteSize())
+
+
+def MessageByteSize(field_number, message):
+ return (TagByteSize(field_number)
+ + _VarUInt64ByteSizeNoTag(message.ByteSize())
+ + message.ByteSize())
+
+
+def MessageSetItemByteSize(field_number, msg):
+ # First compute the sizes of the tags.
+  # There are two tags for the beginning and end of the repeated group
+  # (field number 1), one tag with field number 2 (type_id) and one tag with
+  # field number 3 (message).
+ total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3))
+
+ # Add the number of bytes for type_id.
+ total_size += _VarUInt64ByteSizeNoTag(field_number)
+
+ message_size = msg.ByteSize()
+
+ # The number of bytes for encoding the length of the message.
+ total_size += _VarUInt64ByteSizeNoTag(message_size)
+
+ # The size of the message.
+ total_size += message_size
+ return total_size
+
+
+def TagByteSize(field_number):
+ """Returns the bytes required to serialize a tag with this field number."""
+ # Just pass in type 0, since the type won't affect the tag+type size.
+ return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))
+
+
+# Private helper function for the *ByteSize() functions above.
+
+def _VarUInt64ByteSizeNoTag(uint64):
+ """Returns the number of bytes required to serialize a single varint
+  using boundary value comparisons (unrolled loop optimization -WPierce).
+ uint64 must be unsigned.
+ """
+ if uint64 <= 0x7f: return 1
+ if uint64 <= 0x3fff: return 2
+ if uint64 <= 0x1fffff: return 3
+ if uint64 <= 0xfffffff: return 4
+ if uint64 <= 0x7ffffffff: return 5
+ if uint64 <= 0x3ffffffffff: return 6
+ if uint64 <= 0x1ffffffffffff: return 7
+ if uint64 <= 0xffffffffffffff: return 8
+ if uint64 <= 0x7fffffffffffffff: return 9
+ if uint64 > UINT64_MAX:
+ raise message.EncodeError('Value out of range: %d' % uint64)
+ return 10
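+
+# Illustrative sketch: field numbers 1 through 15 fit the tag in a single
+# varint byte; 16 through 2047 need two.
+#
+#   >>> TagByteSize(15), TagByteSize(16)
+#   (1, 2)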
+
+
+NON_PACKABLE_TYPES = (
+ descriptor.FieldDescriptor.TYPE_STRING,
+ descriptor.FieldDescriptor.TYPE_GROUP,
+ descriptor.FieldDescriptor.TYPE_MESSAGE,
+ descriptor.FieldDescriptor.TYPE_BYTES
+)
+
+
+def IsTypePackable(field_type):
+ """Return true iff packable = true is valid for fields of this type.
+
+ Args:
+ field_type: a FieldDescriptor::Type value.
+
+ Returns:
+ True iff fields of this type are packable.
+ """
+ return field_type not in NON_PACKABLE_TYPES
diff --git a/Lib/site-packages/google/protobuf/json_format.py b/Lib/site-packages/google/protobuf/json_format.py
new file mode 100644
index 0000000..1b6ce9d
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/json_format.py
@@ -0,0 +1,904 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Contains routines for printing protocol messages in JSON format.
+
+Simple usage example:
+
+ # Create a proto object and serialize it to a json format string.
+ message = my_proto_pb2.MyMessage(foo='bar')
+ json_string = json_format.MessageToJson(message)
+
+ # Parse a json format string to proto object.
+ message = json_format.Parse(json_string, my_proto_pb2.MyMessage())
+"""
+
+__author__ = 'jieluo@google.com (Jie Luo)'
+
+
+import base64
+from collections import OrderedDict
+import json
+import math
+from operator import methodcaller
+import re
+
+from google.protobuf.internal import type_checkers
+from google.protobuf import descriptor
+from google.protobuf import message_factory
+from google.protobuf import symbol_database
+
+
+_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32,
+ descriptor.FieldDescriptor.CPPTYPE_UINT32,
+ descriptor.FieldDescriptor.CPPTYPE_INT64,
+ descriptor.FieldDescriptor.CPPTYPE_UINT64])
+_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64,
+ descriptor.FieldDescriptor.CPPTYPE_UINT64])
+_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
+ descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
+_INFINITY = 'Infinity'
+_NEG_INFINITY = '-Infinity'
+_NAN = 'NaN'
+
+_UNPAIRED_SURROGATE_PATTERN = re.compile(
+    u'[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]')
+
+
+  def ConvertMessage(self, value, message, path):
+    """Convert a JSON object into a message.
+
+    Args:
+      value: A JSON object.
+      message: A WKT or regular protocol message to record the data.
+      path: parent path to log parse error info.
+
+    Raises:
+      ParseError: In case of convert problems.
+    """
+    self.recursion_depth += 1
+    if self.recursion_depth > self.max_recursion_depth:
+      raise ParseError('Message too deep. Max recursion depth is {0}'.format(
+          self.max_recursion_depth))
+ message_descriptor = message.DESCRIPTOR
+ full_name = message_descriptor.full_name
+ if not path:
+ path = message_descriptor.name
+ if _IsWrapperMessage(message_descriptor):
+ self._ConvertWrapperMessage(value, message, path)
+ elif full_name in _WKTJSONMETHODS:
+ methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self)
+ else:
+ self._ConvertFieldValuePair(value, message, path)
+ self.recursion_depth -= 1
+
+ def _ConvertFieldValuePair(self, js, message, path):
+ """Convert field value pairs into regular message.
+
+ Args:
+ js: A JSON object to convert the field value pairs.
+ message: A regular protocol message to record the data.
+ path: parent path to log parse error info.
+
+ Raises:
+ ParseError: In case of problems converting.
+ """
+ names = []
+ message_descriptor = message.DESCRIPTOR
+ fields_by_json_name = dict((f.json_name, f)
+ for f in message_descriptor.fields)
+ for name in js:
+ try:
+ field = fields_by_json_name.get(name, None)
+ if not field:
+ field = message_descriptor.fields_by_name.get(name, None)
+ if not field and _VALID_EXTENSION_NAME.match(name):
+ if not message_descriptor.is_extendable:
+ raise ParseError(
+ 'Message type {0} does not have extensions at {1}'.format(
+ message_descriptor.full_name, path))
+ identifier = name[1:-1] # strip [] brackets
+ # pylint: disable=protected-access
+ field = message.Extensions._FindExtensionByName(identifier)
+ # pylint: enable=protected-access
+ if not field:
+ # Try looking for extension by the message type name, dropping the
+ # field name following the final . separator in full_name.
+ identifier = '.'.join(identifier.split('.')[:-1])
+ # pylint: disable=protected-access
+ field = message.Extensions._FindExtensionByName(identifier)
+ # pylint: enable=protected-access
+ if not field:
+ if self.ignore_unknown_fields:
+ continue
+ raise ParseError(
+ ('Message type "{0}" has no field named "{1}" at "{2}".\n'
+ ' Available Fields(except extensions): "{3}"').format(
+ message_descriptor.full_name, name, path,
+ [f.json_name for f in message_descriptor.fields]))
+ if name in names:
+ raise ParseError('Message type "{0}" should not have multiple '
+ '"{1}" fields at "{2}".'.format(
+ message.DESCRIPTOR.full_name, name, path))
+ names.append(name)
+ value = js[name]
+ # Check no other oneof field is parsed.
+ if field.containing_oneof is not None and value is not None:
+ oneof_name = field.containing_oneof.name
+ if oneof_name in names:
+ raise ParseError('Message type "{0}" should not have multiple '
+ '"{1}" oneof fields at "{2}".'.format(
+ message.DESCRIPTOR.full_name, oneof_name,
+ path))
+ names.append(oneof_name)
+
+ if value is None:
+ if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE
+ and field.message_type.full_name == 'google.protobuf.Value'):
+ sub_message = getattr(message, field.name)
+ sub_message.null_value = 0
+ elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM
+ and field.enum_type.full_name == 'google.protobuf.NullValue'):
+ setattr(message, field.name, 0)
+ else:
+ message.ClearField(field.name)
+ continue
+
+ # Parse field value.
+ if _IsMapEntry(field):
+ message.ClearField(field.name)
+ self._ConvertMapFieldValue(value, message, field,
+ '{0}.{1}'.format(path, name))
+ elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+ message.ClearField(field.name)
+ if not isinstance(value, list):
+ raise ParseError('repeated field {0} must be in [] which is '
+ '{1} at {2}'.format(name, value, path))
+ if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
+ # Repeated message field.
+ for index, item in enumerate(value):
+ sub_message = getattr(message, field.name).add()
+ # None is a null_value in Value.
+ if (item is None and
+ sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'):
+ raise ParseError('null is not allowed to be used as an element'
+ ' in a repeated field at {0}.{1}[{2}]'.format(
+ path, name, index))
+ self.ConvertMessage(item, sub_message,
+ '{0}.{1}[{2}]'.format(path, name, index))
+ else:
+ # Repeated scalar field.
+ for index, item in enumerate(value):
+ if item is None:
+ raise ParseError('null is not allowed to be used as an element'
+ ' in a repeated field at {0}.{1}[{2}]'.format(
+ path, name, index))
+ getattr(message, field.name).append(
+ _ConvertScalarFieldValue(
+ item, field, '{0}.{1}[{2}]'.format(path, name, index)))
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
+ if field.is_extension:
+ sub_message = message.Extensions[field]
+ else:
+ sub_message = getattr(message, field.name)
+ sub_message.SetInParent()
+ self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name))
+ else:
+ if field.is_extension:
+ message.Extensions[field] = _ConvertScalarFieldValue(
+ value, field, '{0}.{1}'.format(path, name))
+ else:
+ setattr(
+ message, field.name,
+ _ConvertScalarFieldValue(value, field,
+ '{0}.{1}'.format(path, name)))
+ except ParseError as e:
+ if field and field.containing_oneof is None:
+ raise ParseError(
+ 'Failed to parse {0} field: {1}.'.format(name, e)
+ ) from e
+ else:
+ raise ParseError(str(e)) from e
+ except ValueError as e:
+ raise ParseError(
+ 'Failed to parse {0} field: {1}.'.format(name, e)
+ ) from e
+ except TypeError as e:
+ raise ParseError(
+ 'Failed to parse {0} field: {1}.'.format(name, e)
+ ) from e
+
+ def _ConvertAnyMessage(self, value, message, path):
+ """Convert a JSON representation into Any message."""
+ if isinstance(value, dict) and not value:
+ return
+ try:
+ type_url = value['@type']
+ except KeyError as e:
+ raise ParseError(
+ '@type is missing when parsing any message at {0}'.format(path)
+ ) from e
+
+ try:
+ sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
+ except TypeError as e:
+ raise ParseError('{0} at {1}'.format(e, path)) from e
+ message_descriptor = sub_message.DESCRIPTOR
+ full_name = message_descriptor.full_name
+ if _IsWrapperMessage(message_descriptor):
+ self._ConvertWrapperMessage(value['value'], sub_message,
+ '{0}.value'.format(path))
+ elif full_name in _WKTJSONMETHODS:
+ methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message,
+ '{0}.value'.format(path))(
+ self)
+ else:
+ del value['@type']
+ self._ConvertFieldValuePair(value, sub_message, path)
+ value['@type'] = type_url
+ # Sets Any message
+ message.value = sub_message.SerializeToString()
+ message.type_url = type_url
+
+ def _ConvertGenericMessage(self, value, message, path):
+ """Convert a JSON representation into message with FromJsonString."""
+ # Duration, Timestamp, FieldMask have a FromJsonString method to do the
+ # conversion. Users can also call the method directly.
+ try:
+ message.FromJsonString(value)
+ except ValueError as e:
+ raise ParseError('{0} at {1}'.format(e, path)) from e
+
+ def _ConvertValueMessage(self, value, message, path):
+ """Convert a JSON representation into Value message."""
+ if isinstance(value, dict):
+ self._ConvertStructMessage(value, message.struct_value, path)
+ elif isinstance(value, list):
+ self._ConvertListValueMessage(value, message.list_value, path)
+ elif value is None:
+ message.null_value = 0
+ elif isinstance(value, bool):
+ message.bool_value = value
+ elif isinstance(value, str):
+ message.string_value = value
+ elif isinstance(value, _INT_OR_FLOAT):
+ message.number_value = value
+ else:
+ raise ParseError('Value {0} has unexpected type {1} at {2}'.format(
+ value, type(value), path))
+
+ def _ConvertListValueMessage(self, value, message, path):
+ """Convert a JSON representation into ListValue message."""
+ if not isinstance(value, list):
+ raise ParseError('ListValue must be in [] which is {0} at {1}'.format(
+ value, path))
+ message.ClearField('values')
+ for index, item in enumerate(value):
+ self._ConvertValueMessage(item, message.values.add(),
+ '{0}[{1}]'.format(path, index))
+
+ def _ConvertStructMessage(self, value, message, path):
+ """Convert a JSON representation into Struct message."""
+ if not isinstance(value, dict):
+ raise ParseError('Struct must be in a dict which is {0} at {1}'.format(
+ value, path))
+ # Clear will mark the struct as modified so it will be created even if
+ # there are no values.
+ message.Clear()
+ for key in value:
+ self._ConvertValueMessage(value[key], message.fields[key],
+ '{0}.{1}'.format(path, key))
+ return
+
+ def _ConvertWrapperMessage(self, value, message, path):
+ """Convert a JSON representation into Wrapper message."""
+ field = message.DESCRIPTOR.fields_by_name['value']
+ setattr(
+ message, 'value',
+ _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path)))
+
+ def _ConvertMapFieldValue(self, value, message, field, path):
+ """Convert map field value for a message map field.
+
+ Args:
+      value: A JSON object containing the map field value to convert.
+ message: A protocol message to record the converted data.
+ field: The descriptor of the map field to be converted.
+ path: parent path to log parse error info.
+
+ Raises:
+ ParseError: In case of convert problems.
+ """
+ if not isinstance(value, dict):
+ raise ParseError(
+ 'Map field {0} must be in a dict which is {1} at {2}'.format(
+ field.name, value, path))
+ key_field = field.message_type.fields_by_name['key']
+ value_field = field.message_type.fields_by_name['value']
+ for key in value:
+ key_value = _ConvertScalarFieldValue(key, key_field,
+ '{0}.key'.format(path), True)
+ if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
+ self.ConvertMessage(value[key],
+ getattr(message, field.name)[key_value],
+ '{0}[{1}]'.format(path, key_value))
+ else:
+ getattr(message, field.name)[key_value] = _ConvertScalarFieldValue(
+ value[key], value_field, path='{0}[{1}]'.format(path, key_value))
+
+
+def _ConvertScalarFieldValue(value, field, path, require_str=False):
+ """Convert a single scalar field value.
+
+ Args:
+    value: The scalar value to convert.
+ field: The descriptor of the field to convert.
+ path: parent path to log parse error info.
+ require_str: If True, the field value must be a str.
+
+ Returns:
+ The converted scalar field value
+
+ Raises:
+ ParseError: In case of convert problems.
+ """
+ try:
+ if field.cpp_type in _INT_TYPES:
+ return _ConvertInteger(value)
+ elif field.cpp_type in _FLOAT_TYPES:
+ return _ConvertFloat(value, field)
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
+ return _ConvertBool(value, require_str)
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
+ if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
+ if isinstance(value, str):
+ encoded = value.encode('utf-8')
+ else:
+ encoded = value
+ # Add extra padding '='
+ padded_value = encoded + b'=' * (4 - len(encoded) % 4)
+ return base64.urlsafe_b64decode(padded_value)
+ else:
+ # Checking for unpaired surrogates appears to be unreliable,
+ # depending on the specific Python version, so we check manually.
+ if _UNPAIRED_SURROGATE_PATTERN.search(value):
+ raise ParseError('Unpaired surrogate')
+ return value
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
+ # Convert an enum value.
+ enum_value = field.enum_type.values_by_name.get(value, None)
+ if enum_value is None:
+ try:
+ number = int(value)
+ enum_value = field.enum_type.values_by_number.get(number, None)
+ except ValueError as e:
+ raise ParseError('Invalid enum value {0} for enum type {1}'.format(
+ value, field.enum_type.full_name)) from e
+ if enum_value is None:
+ if field.enum_type.is_closed:
+ raise ParseError('Invalid enum value {0} for enum type {1}'.format(
+ value, field.enum_type.full_name))
+ else:
+ return number
+ return enum_value.number
+ except ParseError as e:
+ raise ParseError('{0} at {1}'.format(e, path)) from e
+
+
+def _ConvertInteger(value):
+ """Convert an integer.
+
+ Args:
+ value: A scalar value to convert.
+
+ Returns:
+ The integer value.
+
+ Raises:
+ ParseError: If an integer couldn't be consumed.
+ """
+ if isinstance(value, float) and not value.is_integer():
+ raise ParseError('Couldn\'t parse integer: {0}'.format(value))
+
+ if isinstance(value, str) and value.find(' ') != -1:
+ raise ParseError('Couldn\'t parse integer: "{0}"'.format(value))
+
+ if isinstance(value, bool):
+ raise ParseError('Bool value {0} is not acceptable for '
+ 'integer field'.format(value))
+
+ return int(value)
+
+
+def _ConvertFloat(value, field):
+ """Convert an floating point number."""
+ if isinstance(value, float):
+ if math.isnan(value):
+ raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead')
+ if math.isinf(value):
+ if value > 0:
+ raise ParseError('Couldn\'t parse Infinity or value too large, '
+ 'use quoted "Infinity" instead')
+ else:
+ raise ParseError('Couldn\'t parse -Infinity or value too small, '
+ 'use quoted "-Infinity" instead')
+ if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
+ # pylint: disable=protected-access
+ if value > type_checkers._FLOAT_MAX:
+ raise ParseError('Float value too large')
+ # pylint: disable=protected-access
+ if value < type_checkers._FLOAT_MIN:
+ raise ParseError('Float value too small')
+ if value == 'nan':
+ raise ParseError('Couldn\'t parse float "nan", use "NaN" instead')
+ try:
+ # Assume Python compatible syntax.
+ return float(value)
+ except ValueError as e:
+ # Check alternative spellings.
+ if value == _NEG_INFINITY:
+ return float('-inf')
+ elif value == _INFINITY:
+ return float('inf')
+ elif value == _NAN:
+ return float('nan')
+ else:
+ raise ParseError('Couldn\'t parse float: {0}'.format(value)) from e
+
+
+def _ConvertBool(value, require_str):
+ """Convert a boolean value.
+
+ Args:
+ value: A scalar value to convert.
+ require_str: If True, value must be a str.
+
+ Returns:
+ The bool parsed.
+
+ Raises:
+ ParseError: If a boolean value couldn't be consumed.
+ """
+ if require_str:
+ if value == 'true':
+ return True
+ elif value == 'false':
+ return False
+ else:
+ raise ParseError('Expected "true" or "false", not {0}'.format(value))
+
+ if not isinstance(value, bool):
+ raise ParseError('Expected true or false without quotes')
+ return value
+
+_WKTJSONMETHODS = {
+ 'google.protobuf.Any': ['_AnyMessageToJsonObject',
+ '_ConvertAnyMessage'],
+ 'google.protobuf.Duration': ['_GenericMessageToJsonObject',
+ '_ConvertGenericMessage'],
+ 'google.protobuf.FieldMask': ['_GenericMessageToJsonObject',
+ '_ConvertGenericMessage'],
+ 'google.protobuf.ListValue': ['_ListValueMessageToJsonObject',
+ '_ConvertListValueMessage'],
+ 'google.protobuf.Struct': ['_StructMessageToJsonObject',
+ '_ConvertStructMessage'],
+ 'google.protobuf.Timestamp': ['_GenericMessageToJsonObject',
+ '_ConvertGenericMessage'],
+ 'google.protobuf.Value': ['_ValueMessageToJsonObject',
+ '_ConvertValueMessage']
+}
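
The `_WKTJSONMETHODS` table above routes well-known types to dedicated converters, so a `google.protobuf.Struct` accepts arbitrary JSON objects. A small usage sketch, assuming the standard `struct_pb2` module and the module-level `Parse`/`MessageToJson` entry points (which live in the portion of json_format.py not shown in this hunk):

```python
from google.protobuf import json_format
from google.protobuf import struct_pb2

msg = struct_pb2.Struct()
# Routed through _ConvertStructMessage: null becomes NullValue, the list
# becomes a ListValue, and scalars land in the matching Value oneof field.
json_format.Parse('{"name": "Ada", "tags": ["a", "b"], "extra": null}', msg)
assert msg['name'] == 'Ada'
print(json_format.MessageToJson(msg))
```
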
diff --git a/Lib/site-packages/google/protobuf/message.py b/Lib/site-packages/google/protobuf/message.py
new file mode 100644
index 0000000..29ebd7b
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/message.py
@@ -0,0 +1,399 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# TODO: We should just make these methods all "pure-virtual" and move
+# all implementation out, into reflection.py for now.
+
+
+"""Contains an abstract base class for protocol messages."""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+class Error(Exception):
+ """Base error type for this module."""
+ pass
+
+
+class DecodeError(Error):
+ """Exception raised when deserializing messages."""
+ pass
+
+
+class EncodeError(Error):
+ """Exception raised when serializing messages."""
+ pass
+
+
+class Message(object):
+
+ """Abstract base class for protocol messages.
+
+ Protocol message classes are almost always generated by the protocol
+ compiler. These generated types subclass Message and implement the methods
+ shown below.
+ """
+
+ # TODO: Link to an HTML document here.
+
+ # TODO: Document that instances of this class will also
+ # have an Extensions attribute with __getitem__ and __setitem__.
+ # Again, not sure how to best convey this.
+
+ # TODO: Document these fields and methods.
+
+ __slots__ = []
+
+  #: The :class:`google.protobuf.Descriptor`
+  #: for this message type.
+ DESCRIPTOR = None
+
+ def __deepcopy__(self, memo=None):
+ clone = type(self)()
+ clone.MergeFrom(self)
+ return clone
+
+ def __eq__(self, other_msg):
+ """Recursively compares two messages by value and structure."""
+ raise NotImplementedError
+
+ def __ne__(self, other_msg):
+ # Can't just say self != other_msg, since that would infinitely recurse. :)
+ return not self == other_msg
+
+ def __hash__(self):
+ raise TypeError('unhashable object')
+
+ def __str__(self):
+ """Outputs a human-readable representation of the message."""
+ raise NotImplementedError
+
+ def __unicode__(self):
+ """Outputs a human-readable representation of the message."""
+ raise NotImplementedError
+
+ def MergeFrom(self, other_msg):
+ """Merges the contents of the specified message into current message.
+
+ This method merges the contents of the specified message into the current
+ message. Singular fields that are set in the specified message overwrite
+ the corresponding fields in the current message. Repeated fields are
+ appended. Singular sub-messages and groups are recursively merged.
+
+ Args:
+ other_msg (Message): A message to merge into the current message.
+ """
+ raise NotImplementedError
+
+ def CopyFrom(self, other_msg):
+ """Copies the content of the specified message into the current message.
+
+ The method clears the current message and then merges the specified
+ message using MergeFrom.
+
+ Args:
+ other_msg (Message): A message to copy into the current one.
+ """
+ if self is other_msg:
+ return
+ self.Clear()
+ self.MergeFrom(other_msg)
+
+ def Clear(self):
+ """Clears all data that was set in the message."""
+ raise NotImplementedError
+
+ def SetInParent(self):
+ """Mark this as present in the parent.
+
+ This normally happens automatically when you assign a field of a
+ sub-message, but sometimes you want to make the sub-message
+ present while keeping it empty. If you find yourself using this,
+ you may want to reconsider your design.
+ """
+ raise NotImplementedError
+
+ def IsInitialized(self):
+ """Checks if the message is initialized.
+
+ Returns:
+ bool: The method returns True if the message is initialized (i.e. all of
+ its required fields are set).
+ """
+ raise NotImplementedError
+
+ # TODO: MergeFromString() should probably return None and be
+ # implemented in terms of a helper that returns the # of bytes read. Our
+ # deserialization routines would use the helper when recursively
+ # deserializing, but the end user would almost always just want the no-return
+ # MergeFromString().
+
+ def MergeFromString(self, serialized):
+ """Merges serialized protocol buffer data into this message.
+
+ When we find a field in `serialized` that is already present
+ in this message:
+
+ - If it's a "repeated" field, we append to the end of our list.
+ - Else, if it's a scalar, we overwrite our field.
+ - Else, (it's a nonrepeated composite), we recursively merge
+ into the existing composite.
+
+ Args:
+ serialized (bytes): Any object that allows us to call
+ ``memoryview(serialized)`` to access a string of bytes using the
+ buffer interface.
+
+ Returns:
+ int: The number of bytes read from `serialized`.
+ For non-group messages, this will always be `len(serialized)`,
+ but for messages which are actually groups, this will
+ generally be less than `len(serialized)`, since we must
+ stop when we reach an ``END_GROUP`` tag. Note that if
+ we *do* stop because of an ``END_GROUP`` tag, the number
+ of bytes returned does not include the bytes
+ for the ``END_GROUP`` tag information.
+
+ Raises:
+ DecodeError: if the input cannot be parsed.
+ """
+ # TODO: Document handling of unknown fields.
+ # TODO: When we switch to a helper, this will return None.
+ raise NotImplementedError
+
+ def ParseFromString(self, serialized):
+ """Parse serialized protocol buffer data in binary form into this message.
+
+ Like :func:`MergeFromString()`, except we clear the object first.
+
+    Raises:
+      DecodeError: if the input cannot be parsed.
+    """
+ self.Clear()
+ return self.MergeFromString(serialized)
+
+ def SerializeToString(self, **kwargs):
+ """Serializes the protocol message to a binary string.
+
+ Keyword Args:
+ deterministic (bool): If true, requests deterministic serialization
+ of the protobuf, with predictable ordering of map keys.
+
+ Returns:
+ A binary string representation of the message if all of the required
+ fields in the message are set (i.e. the message is initialized).
+
+ Raises:
+ EncodeError: if the message isn't initialized (see :func:`IsInitialized`).
+ """
+ raise NotImplementedError
+
+ def SerializePartialToString(self, **kwargs):
+ """Serializes the protocol message to a binary string.
+
+ This method is similar to SerializeToString but doesn't check if the
+ message is initialized.
+
+ Keyword Args:
+ deterministic (bool): If true, requests deterministic serialization
+ of the protobuf, with predictable ordering of map keys.
+
+ Returns:
+ bytes: A serialized representation of the partial message.
+ """
+ raise NotImplementedError
+
+ # TODO: Decide whether we like these better
+ # than auto-generated has_foo() and clear_foo() methods
+ # on the instances themselves. This way is less consistent
+ # with C++, but it makes reflection-type access easier and
+ # reduces the number of magically autogenerated things.
+ #
+ # TODO: Be sure to document (and test) exactly
+ # which field names are accepted here. Are we case-sensitive?
+ # What do we do with fields that share names with Python keywords
+ # like 'lambda' and 'yield'?
+ #
+ # nnorwitz says:
+ # """
+ # Typically (in python), an underscore is appended to names that are
+ # keywords. So they would become lambda_ or yield_.
+ # """
+ def ListFields(self):
+ """Returns a list of (FieldDescriptor, value) tuples for present fields.
+
+    A message field is non-empty if HasField() would return true. A singular
+    primitive field is non-empty if HasField() would return true in proto2, or
+    if it is non-zero in proto3. A repeated field is non-empty if it contains
+    at least one element. The fields are ordered by field number.
+
+ Returns:
+ list[tuple(FieldDescriptor, value)]: field descriptors and values
+ for all fields in the message which are not empty. The values vary by
+ field type.
+ """
+ raise NotImplementedError
+
+ def HasField(self, field_name):
+ """Checks if a certain field is set for the message.
+
+ For a oneof group, checks if any field inside is set. Note that if the
+ field_name is not defined in the message descriptor, :exc:`ValueError` will
+ be raised.
+
+ Args:
+ field_name (str): The name of the field to check for presence.
+
+ Returns:
+ bool: Whether a value has been set for the named field.
+
+ Raises:
+ ValueError: if the `field_name` is not a member of this message.
+ """
+ raise NotImplementedError
+
+ def ClearField(self, field_name):
+ """Clears the contents of a given field.
+
+    Inside a oneof group, clears the field that is set. If the name refers to
+    neither a defined field nor a oneof group, :exc:`ValueError` is raised.
+
+    Args:
+      field_name (str): The name of the field to clear.
+
+ Raises:
+ ValueError: if the `field_name` is not a member of this message.
+ """
+ raise NotImplementedError
+
+ def WhichOneof(self, oneof_group):
+ """Returns the name of the field that is set inside a oneof group.
+
+ If no field is set, returns None.
+
+ Args:
+ oneof_group (str): the name of the oneof group to check.
+
+ Returns:
+ str or None: The name of the group that is set, or None.
+
+ Raises:
+ ValueError: no group with the given name exists
+ """
+ raise NotImplementedError
+
+ def HasExtension(self, field_descriptor):
+ """Checks if a certain extension is present for this message.
+
+ Extensions are retrieved using the :attr:`Extensions` mapping (if present).
+
+ Args:
+ field_descriptor: The field descriptor for the extension to check.
+
+ Returns:
+ bool: Whether the extension is present for this message.
+
+ Raises:
+ KeyError: if the extension is repeated. Similar to repeated fields,
+ there is no separate notion of presence: a "not present" repeated
+ extension is an empty list.
+ """
+ raise NotImplementedError
+
+ def ClearExtension(self, field_descriptor):
+ """Clears the contents of a given extension.
+
+ Args:
+ field_descriptor: The field descriptor for the extension to clear.
+ """
+ raise NotImplementedError
+
+ def UnknownFields(self):
+ """Returns the UnknownFieldSet.
+
+ Returns:
+ UnknownFieldSet: The unknown fields stored in this message.
+ """
+ raise NotImplementedError
+
+ def DiscardUnknownFields(self):
+ """Clears all fields in the :class:`UnknownFieldSet`.
+
+ This operation is recursive for nested message.
+ """
+ raise NotImplementedError
+
+ def ByteSize(self):
+ """Returns the serialized size of this message.
+
+ Recursively calls ByteSize() on all contained messages.
+
+ Returns:
+ int: The number of bytes required to serialize this message.
+ """
+ raise NotImplementedError
+
+ @classmethod
+ def FromString(cls, s):
+ raise NotImplementedError
+
+  # TODO: Remove it in OSS
+ @staticmethod
+ def RegisterExtension(field_descriptor):
+ raise NotImplementedError
+
+ def _SetListener(self, message_listener):
+ """Internal method used by the protocol message implementation.
+ Clients should not call this directly.
+
+ Sets a listener that this message will call on certain state transitions.
+
+ The purpose of this method is to register back-edges from children to
+ parents at runtime, for the purpose of setting "has" bits and
+ byte-size-dirty bits in the parent and ancestor objects whenever a child or
+ descendant object is modified.
+
+    To disconnect this Message from the object tree, the client explicitly
+    sets the callback to None.
+
+ If message_listener is None, unregisters any existing listener. Otherwise,
+ message_listener must implement the MessageListener interface in
+ internal/message_listener.py, and we discard any listener registered
+ via a previous _SetListener() call.
+ """
+ raise NotImplementedError
+
+ def __getstate__(self):
+ """Support the pickle protocol."""
+ return dict(serialized=self.SerializePartialToString())
+
+ def __setstate__(self, state):
+ """Support the pickle protocol."""
+ self.__init__()
+ serialized = state['serialized']
+ # On Python 3, using encoding='latin1' is required for unpickling
+ # protos pickled by Python 2.
+ if not isinstance(serialized, bytes):
+ serialized = serialized.encode('latin1')
+ self.ParseFromString(serialized)
+
+ def __reduce__(self):
+ message_descriptor = self.DESCRIPTOR
+ if message_descriptor.containing_type is None:
+ return type(self), (), self.__getstate__()
+    # The message type must be nested.
+ # Python does not pickle nested classes; use the symbol_database on the
+ # receiving end.
+ container = message_descriptor
+ return (_InternalConstructMessage, (container.full_name,),
+ self.__getstate__())
+
+
+def _InternalConstructMessage(full_name):
+ """Constructs a nested message."""
+ from google.protobuf import symbol_database # pylint:disable=g-import-not-at-top
+
+ return symbol_database.Default().GetSymbol(full_name)()
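
Because `__deepcopy__`, `__getstate__`, `__setstate__`, and `__reduce__` are implemented on this abstract base, every generated message is copyable and picklable for free. A quick sketch using the standard `Timestamp` well-known type (field values are arbitrary):

```python
import copy
import pickle

from google.protobuf import timestamp_pb2

ts = timestamp_pb2.Timestamp(seconds=1700000000, nanos=42)

clone = copy.deepcopy(ts)  # __deepcopy__: fresh instance + MergeFrom
assert clone == ts and clone is not ts

restored = pickle.loads(pickle.dumps(ts))  # __reduce__ round-trip
assert restored == ts
```
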
diff --git a/Lib/site-packages/google/protobuf/message_factory.py b/Lib/site-packages/google/protobuf/message_factory.py
new file mode 100644
index 0000000..56fff6d
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/message_factory.py
@@ -0,0 +1,233 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Provides a factory class for generating dynamic messages.
+
+The easiest way to use this class: if you have access to the FileDescriptor
+protos containing the messages you want to create, you can just do the
+following:
+
+message_classes = message_factory.GetMessages(iterable_of_file_descriptors)
+my_proto_instance = message_classes['some.proto.package.MessageName']()
+"""
+
+__author__ = 'matthewtoia@google.com (Matt Toia)'
+
+import warnings
+
+from google.protobuf.internal import api_implementation
+from google.protobuf import descriptor_pool
+from google.protobuf import message
+
+if api_implementation.Type() == 'python':
+ from google.protobuf.internal import python_message as message_impl
+else:
+ from google.protobuf.pyext import cpp_message as message_impl # pylint: disable=g-import-not-at-top
+
+
+# The type of all Message classes.
+_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType
+
+
+def GetMessageClass(descriptor):
+ """Obtains a proto2 message class based on the passed in descriptor.
+
+ Passing a descriptor with a fully qualified name matching a previous
+ invocation will cause the same class to be returned.
+
+ Args:
+ descriptor: The descriptor to build from.
+
+ Returns:
+ A class describing the passed in descriptor.
+ """
+ concrete_class = getattr(descriptor, '_concrete_class', None)
+ if concrete_class:
+ return concrete_class
+ return _InternalCreateMessageClass(descriptor)
+
+
+def GetMessageClassesForFiles(files, pool):
+ """Gets all the messages from specified files.
+
+ This will find and resolve dependencies, failing if the descriptor
+ pool cannot satisfy them.
+
+ Args:
+ files: The file names to extract messages from.
+ pool: The descriptor pool to find the files including the dependent
+ files.
+
+ Returns:
+ A dictionary mapping proto names to the message classes.
+ """
+ result = {}
+ for file_name in files:
+ file_desc = pool.FindFileByName(file_name)
+ for desc in file_desc.message_types_by_name.values():
+ result[desc.full_name] = GetMessageClass(desc)
+
+ # While the extension FieldDescriptors are created by the descriptor pool,
+ # the python classes created in the factory need them to be registered
+ # explicitly, which is done below.
+ #
+ # The call to RegisterExtension will specifically check if the
+ # extension was already registered on the object and either
+ # ignore the registration if the original was the same, or raise
+ # an error if they were different.
+
+ for extension in file_desc.extensions_by_name.values():
+ extended_class = GetMessageClass(extension.containing_type)
+ if api_implementation.Type() != 'python':
+ # TODO: Remove this check here. Duplicate extension
+ # register check should be in descriptor_pool.
+ if extension is not pool.FindExtensionByNumber(
+ extension.containing_type, extension.number
+ ):
+ raise ValueError('Double registration of Extensions')
+ # Recursively load protos for extension field, in order to be able to
+ # fully represent the extension. This matches the behavior for regular
+ # fields too.
+ if extension.message_type:
+ GetMessageClass(extension.message_type)
+ return result
+
+
+def _InternalCreateMessageClass(descriptor):
+ """Builds a proto2 message class based on the passed in descriptor.
+
+ Args:
+ descriptor: The descriptor to build from.
+
+ Returns:
+ A class describing the passed in descriptor.
+ """
+ descriptor_name = descriptor.name
+ result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
+ descriptor_name,
+ (message.Message,),
+ {
+ 'DESCRIPTOR': descriptor,
+ # If module not set, it wrongly points to message_factory module.
+ '__module__': None,
+ })
+ for field in descriptor.fields:
+ if field.message_type:
+ GetMessageClass(field.message_type)
+ for extension in result_class.DESCRIPTOR.extensions:
+ extended_class = GetMessageClass(extension.containing_type)
+ if api_implementation.Type() != 'python':
+ # TODO: Remove this check here. Duplicate extension
+ # register check should be in descriptor_pool.
+ pool = extension.containing_type.file.pool
+ if extension is not pool.FindExtensionByNumber(
+ extension.containing_type, extension.number
+ ):
+ raise ValueError('Double registration of Extensions')
+ if extension.message_type:
+ GetMessageClass(extension.message_type)
+ return result_class
+
+
+# Deprecated. Please use GetMessageClass() or GetMessageClassesForFiles()
+# method above instead.
+class MessageFactory(object):
+ """Factory for creating Proto2 messages from descriptors in a pool."""
+
+ def __init__(self, pool=None):
+ """Initializes a new factory."""
+ self.pool = pool or descriptor_pool.DescriptorPool()
+
+ def GetPrototype(self, descriptor):
+ """Obtains a proto2 message class based on the passed in descriptor.
+
+ Passing a descriptor with a fully qualified name matching a previous
+ invocation will cause the same class to be returned.
+
+ Args:
+ descriptor: The descriptor to build from.
+
+ Returns:
+ A class describing the passed in descriptor.
+ """
+ warnings.warn(
+ 'MessageFactory class is deprecated. Please use '
+ 'GetMessageClass() instead of MessageFactory.GetPrototype. '
+ 'MessageFactory class will be removed after 2024.',
+ stacklevel=2,
+ )
+ return GetMessageClass(descriptor)
+
+ def CreatePrototype(self, descriptor):
+ """Builds a proto2 message class based on the passed in descriptor.
+
+    Don't call this function directly; it always creates a new class. Call
+    GetMessageClass() instead.
+
+ Args:
+ descriptor: The descriptor to build from.
+
+ Returns:
+ A class describing the passed in descriptor.
+ """
+ warnings.warn(
+        'Calling CreatePrototype directly is wrong. Please use the '
+        'GetMessageClass() method instead. Calling CreatePrototype '
+        'directly will raise an error after July 2023.',
+ stacklevel=2,
+ )
+ return _InternalCreateMessageClass(descriptor)
+
+ def GetMessages(self, files):
+ """Gets all the messages from a specified file.
+
+ This will find and resolve dependencies, failing if the descriptor
+ pool cannot satisfy them.
+
+ Args:
+ files: The file names to extract messages from.
+
+ Returns:
+ A dictionary mapping proto names to the message classes. This will include
+ any dependent messages as well as any messages defined in the same file as
+ a specified message.
+ """
+ warnings.warn(
+ 'MessageFactory class is deprecated. Please use '
+ 'GetMessageClassesForFiles() instead of '
+ 'MessageFactory.GetMessages(). MessageFactory class '
+ 'will be removed after 2024.',
+ stacklevel=2,
+ )
+ return GetMessageClassesForFiles(files, self.pool)
+
+
+def GetMessages(file_protos, pool=None):
+ """Builds a dictionary of all the messages available in a set of files.
+
+ Args:
+ file_protos: Iterable of FileDescriptorProto to build messages out of.
+ pool: The descriptor pool to add the file protos.
+
+ Returns:
+ A dictionary mapping proto names to the message classes. This will include
+ any dependent messages as well as any messages defined in the same file as
+ a specified message.
+ """
+  # The C++ implementation of the protocol buffer library requires files to be
+  # added in topological order of the dependency graph.
+ des_pool = pool or descriptor_pool.DescriptorPool()
+ file_by_name = {file_proto.name: file_proto for file_proto in file_protos}
+ def _AddFile(file_proto):
+ for dependency in file_proto.dependency:
+ if dependency in file_by_name:
+ # Remove from elements to be visited, in order to cut cycles.
+ _AddFile(file_by_name.pop(dependency))
+ des_pool.Add(file_proto)
+ while file_by_name:
+ _AddFile(file_by_name.popitem()[1])
+ return GetMessageClassesForFiles(
+ [file_proto.name for file_proto in file_protos], des_pool)
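
The module-level `GetMessages()` accepts raw `FileDescriptorProto`s and handles topological insertion into the pool itself. A minimal sketch that defines a one-field message entirely in code (the file name and package are illustrative):

```python
from google.protobuf import descriptor_pb2
from google.protobuf import message_factory

file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example/person.proto'  # illustrative file name
file_proto.package = 'example'

msg_proto = file_proto.message_type.add()
msg_proto.name = 'Person'
field = msg_proto.field.add()
field.name = 'name'
field.number = 1
field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

classes = message_factory.GetMessages([file_proto])
Person = classes['example.Person']
person = Person(name='Ada')
assert person.SerializeToString()
```
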
diff --git a/Lib/site-packages/google/protobuf/proto_builder.py b/Lib/site-packages/google/protobuf/proto_builder.py
new file mode 100644
index 0000000..803d004
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/proto_builder.py
@@ -0,0 +1,111 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Dynamic Protobuf class creator."""
+
+from collections import OrderedDict
+import hashlib
+import os
+
+from google.protobuf import descriptor_pb2
+from google.protobuf import descriptor
+from google.protobuf import descriptor_pool
+from google.protobuf import message_factory
+
+
+def _GetMessageFromFactory(pool, full_name):
+ """Get a proto class from the MessageFactory by name.
+
+ Args:
+ pool: a descriptor pool.
+ full_name: str, the fully qualified name of the proto type.
+ Returns:
+ A class, for the type identified by full_name.
+ Raises:
+ KeyError, if the proto is not found in the factory's descriptor pool.
+ """
+ proto_descriptor = pool.FindMessageTypeByName(full_name)
+ proto_cls = message_factory.GetMessageClass(proto_descriptor)
+ return proto_cls
+
+
+def MakeSimpleProtoClass(fields, full_name=None, pool=None):
+ """Create a Protobuf class whose fields are basic types.
+
+ Note: this doesn't validate field names!
+
+ Args:
+ fields: dict of {name: field_type} mappings for each field in the proto. If
+ this is an OrderedDict the order will be maintained, otherwise the
+ fields will be sorted by name.
+ full_name: optional str, the fully-qualified name of the proto type.
+ pool: optional DescriptorPool instance.
+ Returns:
+ a class, the new protobuf class with a FileDescriptor.
+ """
+ pool_instance = pool or descriptor_pool.DescriptorPool()
+ if full_name is not None:
+ try:
+ proto_cls = _GetMessageFromFactory(pool_instance, full_name)
+ return proto_cls
+ except KeyError:
+ # The factory's DescriptorPool doesn't know about this class yet.
+ pass
+
+ # Get a list of (name, field_type) tuples from the fields dict. If fields was
+ # an OrderedDict we keep the order, but otherwise we sort the field to ensure
+ # consistent ordering.
+ field_items = fields.items()
+ if not isinstance(fields, OrderedDict):
+ field_items = sorted(field_items)
+
+ # Use a consistent file name that is unlikely to conflict with any imported
+ # proto files.
+ fields_hash = hashlib.sha1()
+ for f_name, f_type in field_items:
+ fields_hash.update(f_name.encode('utf-8'))
+ fields_hash.update(str(f_type).encode('utf-8'))
+ proto_file_name = fields_hash.hexdigest() + '.proto'
+
+ # If the proto is anonymous, use the same hash to name it.
+ if full_name is None:
+ full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' +
+ fields_hash.hexdigest())
+ try:
+ proto_cls = _GetMessageFromFactory(pool_instance, full_name)
+ return proto_cls
+ except KeyError:
+ # The factory's DescriptorPool doesn't know about this class yet.
+ pass
+
+ # This is the first time we see this proto: add a new descriptor to the pool.
+ pool_instance.Add(
+ _MakeFileDescriptorProto(proto_file_name, full_name, field_items))
+ return _GetMessageFromFactory(pool_instance, full_name)
+
+
+def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
+ """Populate FileDescriptorProto for MessageFactory's DescriptorPool."""
+ package, name = full_name.rsplit('.', 1)
+ file_proto = descriptor_pb2.FileDescriptorProto()
+ file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
+ file_proto.package = package
+ desc_proto = file_proto.message_type.add()
+ desc_proto.name = name
+ for f_number, (f_name, f_type) in enumerate(field_items, 1):
+ field_proto = desc_proto.field.add()
+ field_proto.name = f_name
+    # If the number falls in the reserved range, reassign it to the correct
+    # number after the range.
+ if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER:
+ f_number += (
+ descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER -
+ descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1)
+ field_proto.number = f_number
+ field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
+ field_proto.type = f_type
+ return file_proto
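
Putting `MakeSimpleProtoClass` to work takes only a field dict; an `OrderedDict` pins field-number assignment to the given order. A short sketch (the `test.Sample` name is arbitrary):

```python
from collections import OrderedDict

from google.protobuf import descriptor_pb2
from google.protobuf import proto_builder

fields = OrderedDict([
    ('name', descriptor_pb2.FieldDescriptorProto.TYPE_STRING),
    ('count', descriptor_pb2.FieldDescriptorProto.TYPE_INT64),
])
# Field numbers are assigned in iteration order: name=1, count=2.
Sample = proto_builder.MakeSimpleProtoClass(fields, full_name='test.Sample')

msg = Sample(name='x', count=3)
data = msg.SerializeToString()
assert Sample.FromString(data) == msg
```
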
diff --git a/Lib/site-packages/google/protobuf/pyext/__init__.py b/Lib/site-packages/google/protobuf/pyext/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/google/protobuf/pyext/cpp_message.py b/Lib/site-packages/google/protobuf/pyext/cpp_message.py
new file mode 100644
index 0000000..623b52f
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/pyext/cpp_message.py
@@ -0,0 +1,49 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Protocol message implementation hooks for C++ implementation.
+
+Contains helper functions used to create protocol message classes from
+Descriptor objects at runtime backed by the protocol buffer C++ API.
+"""
+
+__author__ = 'tibell@google.com (Johan Tibell)'
+
+from google.protobuf.internal import api_implementation
+
+
+# pylint: disable=protected-access
+_message = api_implementation._c_module
+# TODO: Remove this import after fix api_implementation
+if _message is None:
+ from google.protobuf.pyext import _message
+
+
+class GeneratedProtocolMessageType(_message.MessageMeta):
+
+ """Metaclass for protocol message classes created at runtime from Descriptors.
+
+ The protocol compiler currently uses this metaclass to create protocol
+ message classes at runtime. Clients can also manually create their own
+ classes at runtime, as in this example:
+
+ mydescriptor = Descriptor(.....)
+ factory = symbol_database.Default()
+ factory.pool.AddDescriptor(mydescriptor)
+ MyProtoClass = factory.GetPrototype(mydescriptor)
+ myproto_instance = MyProtoClass()
+    myproto_instance.foo_field = 23
+ ...
+
+ The above example will not work for nested types. If you wish to include them,
+ use reflection.MakeClass() instead of manually instantiating the class in
+ order to create the appropriate class structure.
+ """
+
+ # Must be consistent with the protocol-compiler code in
+ # proto2/compiler/internal/generator.*.
+ _DESCRIPTOR_KEY = 'DESCRIPTOR'
diff --git a/Lib/site-packages/google/protobuf/reflection.py b/Lib/site-packages/google/protobuf/reflection.py
new file mode 100644
index 0000000..2089f01
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/reflection.py
@@ -0,0 +1,72 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# This code is meant to work on Python 2.4 and above only.
+
+"""Contains a metaclass and helper functions used to create
+protocol message classes from Descriptor objects at runtime.
+
+Recall that a metaclass is the "type" of a class.
+(A class is to a metaclass what an instance is to a class.)
+
+In this case, we use the GeneratedProtocolMessageType metaclass
+to inject all the useful functionality into the classes
+output by the protocol compiler at compile-time.
+
+The upshot of all this is that the real implementation
+details for ALL pure-Python protocol buffers live in
+python_message.py; this module re-exports the metaclass and
+keeps deprecated helpers for backward compatibility.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+
+from google.protobuf import message_factory
+from google.protobuf import symbol_database
+
+# The type of all Message classes.
+# Part of the public interface, but normally only used by message factories.
+GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE
+
+MESSAGE_CLASS_CACHE = {}
+
+
+# Deprecated. Please NEVER use reflection.ParseMessage().
+def ParseMessage(descriptor, byte_str):
+ """Generate a new Message instance from this Descriptor and a byte string.
+
+ DEPRECATED: ParseMessage is deprecated because it is using MakeClass().
+ Please use MessageFactory.GetPrototype() instead.
+
+ Args:
+ descriptor: Protobuf Descriptor object
+ byte_str: Serialized protocol buffer byte string
+
+ Returns:
+ Newly created protobuf Message object.
+ """
+ result_class = MakeClass(descriptor)
+ new_msg = result_class()
+ new_msg.ParseFromString(byte_str)
+ return new_msg
+
+
+# Deprecated. Please NEVER use reflection.MakeClass().
+def MakeClass(descriptor):
+ """Construct a class object for a protobuf described by descriptor.
+
+ DEPRECATED: use MessageFactory.GetPrototype() instead.
+
+ Args:
+ descriptor: A descriptor.Descriptor object describing the protobuf.
+ Returns:
+ The Message class object described by the descriptor.
+ """
+  # The original implementation led to duplicate message classes, which won't
+  # play well with extensions. The message factory info was also missing.
+  # Redirect to message_factory instead.
+ return message_factory.GetMessageClass(descriptor)
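
Because `MakeClass` now redirects to `message_factory.GetMessageClass`, asking for a descriptor that already has a generated class hands back that same class rather than a duplicate, e.g.:

```python
from google.protobuf import reflection
from google.protobuf import timestamp_pb2

# The redirect returns the existing concrete class, not a fresh duplicate.
cls = reflection.MakeClass(timestamp_pb2.Timestamp.DESCRIPTOR)
assert cls is timestamp_pb2.Timestamp
```
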
diff --git a/Lib/site-packages/google/protobuf/service.py b/Lib/site-packages/google/protobuf/service.py
new file mode 100644
index 0000000..d3e1920
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/service.py
@@ -0,0 +1,205 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""DEPRECATED: Declares the RPC service interfaces.
+
+This module declares the abstract interfaces underlying proto2 RPC
+services. These are intended to be independent of any particular RPC
+implementation, so that proto2 services can be used on top of a variety
+of implementations. Starting with version 2.3.0, RPC implementations should
+not try to build on these, but should instead provide code generator plugins
+which generate code specific to the particular RPC implementation. This way
+the generated code can be more appropriate for the implementation in use
+and can avoid unnecessary layers of indirection.
+"""
+
+__author__ = 'petar@google.com (Petar Petrov)'
+
+
+class RpcException(Exception):
+ """Exception raised on failed blocking RPC method call."""
+ pass
+
+
+class Service(object):
+
+ """Abstract base interface for protocol-buffer-based RPC services.
+
+ Services themselves are abstract classes (implemented either by servers or as
+ stubs), but they subclass this base interface. The methods of this
+ interface can be used to call the methods of the service without knowing
+ its exact type at compile time (analogous to the Message interface).
+ """
+
+ def GetDescriptor():
+ """Retrieves this service's descriptor."""
+ raise NotImplementedError
+
+ def CallMethod(self, method_descriptor, rpc_controller,
+ request, done):
+ """Calls a method of the service specified by method_descriptor.
+
+ If "done" is None then the call is blocking and the response
+ message will be returned directly. Otherwise the call is asynchronous
+ and "done" will later be called with the response value.
+
+ In the blocking case, RpcException will be raised on error.
+
+ Preconditions:
+
+ * method_descriptor.service == GetDescriptor
+    * request is an instance of the exact class returned by
+      GetRequestClass(method).
+ * After the call has started, the request must not be modified.
+ * "rpc_controller" is of the correct type for the RPC implementation being
+ used by this Service. For stubs, the "correct type" depends on the
+ RpcChannel which the stub is using.
+
+ Postconditions:
+
+ * "done" will be called when the method is complete. This may be
+ before CallMethod() returns or it may be at some point in the future.
+ * If the RPC failed, the response value passed to "done" will be None.
+ Further details about the failure can be found by querying the
+ RpcController.
+ """
+ raise NotImplementedError
+
+ def GetRequestClass(self, method_descriptor):
+ """Returns the class of the request message for the specified method.
+
+    CallMethod() requires that the request is of a particular subclass of
+    Message. GetRequestClass() returns the class object of this required
+    type.
+
+ Example:
+ method = service.GetDescriptor().FindMethodByName("Foo")
+ request = stub.GetRequestClass(method)()
+ request.ParseFromString(input)
+ service.CallMethod(method, request, callback)
+ """
+ raise NotImplementedError
+
+ def GetResponseClass(self, method_descriptor):
+ """Returns the class of the response message for the specified method.
+
+ This method isn't really needed, as the RpcChannel's CallMethod constructs
+ the response protocol message. It's provided anyway in case it is useful
+ for the caller to know the response type in advance.
+ """
+ raise NotImplementedError
+
+
+class RpcController(object):
+
+ """An RpcController mediates a single method call.
+
+ The primary purpose of the controller is to provide a way to manipulate
+ settings specific to the RPC implementation and to find out about RPC-level
+ errors. The methods provided by the RpcController interface are intended
+ to be a "least common denominator" set of features which we expect all
+ implementations to support. Specific implementations may provide more
+ advanced features (e.g. deadline propagation).
+ """
+
+ # Client-side methods below
+
+ def Reset(self):
+ """Resets the RpcController to its initial state.
+
+ After the RpcController has been reset, it may be reused in
+ a new call. Must not be called while an RPC is in progress.
+ """
+ raise NotImplementedError
+
+ def Failed(self):
+ """Returns true if the call failed.
+
+ After a call has finished, returns true if the call failed. The possible
+ reasons for failure depend on the RPC implementation. Failed() must not
+ be called before a call has finished. If Failed() returns true, the
+ contents of the response message are undefined.
+ """
+ raise NotImplementedError
+
+ def ErrorText(self):
+ """If Failed is true, returns a human-readable description of the error."""
+ raise NotImplementedError
+
+ def StartCancel(self):
+ """Initiate cancellation.
+
+ Advises the RPC system that the caller desires that the RPC call be
+ canceled. The RPC system may cancel it immediately, may wait awhile and
+ then cancel it, or may not even cancel the call at all. If the call is
+ canceled, the "done" callback will still be called and the RpcController
+ will indicate that the call failed at that time.
+ """
+ raise NotImplementedError
+
+ # Server-side methods below
+
+ def SetFailed(self, reason):
+ """Sets a failure reason.
+
+ Causes Failed() to return true on the client side. "reason" will be
+ incorporated into the message returned by ErrorText(). If you find
+ you need to return machine-readable information about failures, you
+ should incorporate it into your response protocol buffer and should
+ NOT call SetFailed().
+ """
+ raise NotImplementedError
+
+ def IsCanceled(self):
+ """Checks if the client cancelled the RPC.
+
+ If true, indicates that the client canceled the RPC, so the server may
+ as well give up on replying to it. The server should still call the
+ final "done" callback.
+ """
+ raise NotImplementedError
+
+ def NotifyOnCancel(self, callback):
+ """Sets a callback to invoke on cancel.
+
+ Asks that the given callback be called when the RPC is canceled. The
+ callback will always be called exactly once. If the RPC completes without
+ being canceled, the callback will be called after completion. If the RPC
+ has already been canceled when NotifyOnCancel() is called, the callback
+ will be called immediately.
+
+ NotifyOnCancel() must be called no more than once per request.
+ """
+ raise NotImplementedError
+
+
+class RpcChannel(object):
+
+ """Abstract interface for an RPC channel.
+
+ An RpcChannel represents a communication line to a service which can be used
+ to call that service's methods. The service may be running on another
+ machine. Normally, you should not use an RpcChannel directly, but instead
+  construct a stub Service wrapping it. Example:
+
+    channel = rpcImpl.Channel("remotehost.example.com:1234")
+    controller = rpcImpl.Controller()
+    service = MyService_Stub(channel)
+    service.MyMethod(controller, request, callback)
+ """
+
+ def CallMethod(self, method_descriptor, rpc_controller,
+ request, response_class, done):
+ """Calls the method identified by the descriptor.
+
+ Call the given method of the remote service. The signature of this
+ procedure looks the same as Service.CallMethod(), but the requirements
+ are less strict in one important way: the request object doesn't have to
+ be of any specific class as long as its descriptor is method.input_type.
+ """
+ raise NotImplementedError
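
These interfaces are deliberately implementation-agnostic, so even a toy in-process RPC layer only has to fill in the abstract methods. A minimal, hypothetical controller sketch (not part of the library) that tracks failure state:

```python
from google.protobuf import service


class LocalController(service.RpcController):
    """Hypothetical in-process controller that only tracks failure state."""

    def __init__(self):
        self._failed = False
        self._error_text = None
        self._cancel_callback = None

    def Reset(self):
        self._failed, self._error_text = False, None

    def Failed(self):
        return self._failed

    def ErrorText(self):
        return self._error_text

    def SetFailed(self, reason):
        self._failed, self._error_text = True, reason

    def StartCancel(self):
        pass  # in-process calls complete synchronously; nothing to cancel

    def IsCanceled(self):
        return False

    def NotifyOnCancel(self, callback):
        # A real implementation must invoke this exactly once (on cancel or
        # after completion); this toy version merely records it.
        self._cancel_callback = callback
```
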
diff --git a/Lib/site-packages/google/protobuf/service_reflection.py b/Lib/site-packages/google/protobuf/service_reflection.py
new file mode 100644
index 0000000..7ba3d0b
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/service_reflection.py
@@ -0,0 +1,272 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Contains metaclasses used to create protocol service and service stub
+classes from ServiceDescriptor objects at runtime.
+
+The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
+inject all useful functionality into the classes output by the protocol
+compiler at compile-time.
+"""
+
+__author__ = 'petar@google.com (Petar Petrov)'
+
+
+class GeneratedServiceType(type):
+
+ """Metaclass for service classes created at runtime from ServiceDescriptors.
+
+ Implementations for all methods described in the Service class are added here
+ by this class. We also create properties to allow getting/setting all fields
+ in the protocol message.
+
+ The protocol compiler currently uses this metaclass to create protocol service
+ classes at runtime. Clients can also manually create their own classes at
+ runtime, as in this example::
+
+ mydescriptor = ServiceDescriptor(.....)
+ class MyProtoService(service.Service):
+ __metaclass__ = GeneratedServiceType
+ DESCRIPTOR = mydescriptor
+ myservice_instance = MyProtoService()
+ # ...
+ """
+
+ _DESCRIPTOR_KEY = 'DESCRIPTOR'
+
+ def __init__(cls, name, bases, dictionary):
+ """Creates a message service class.
+
+ Args:
+ name: Name of the class (ignored, but required by the metaclass
+ protocol).
+ bases: Base classes of the class being constructed.
+ dictionary: The class dictionary of the class being constructed.
+ dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
+ describing this protocol service type.
+ """
+ # Don't do anything if this class doesn't have a descriptor. This happens
+ # when a service class is subclassed.
+ if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary:
+ return
+
+ descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
+ service_builder = _ServiceBuilder(descriptor)
+ service_builder.BuildService(cls)
+ cls.DESCRIPTOR = descriptor
+
+
+class GeneratedServiceStubType(GeneratedServiceType):
+
+ """Metaclass for service stubs created at runtime from ServiceDescriptors.
+
+ This class has similar responsibilities as GeneratedServiceType, except that
+ it creates the service stub classes.
+ """
+
+ _DESCRIPTOR_KEY = 'DESCRIPTOR'
+
+ def __init__(cls, name, bases, dictionary):
+ """Creates a message service stub class.
+
+ Args:
+      name: Name of the class (ignored, but required by the metaclass
+        protocol).
+ bases: Base classes of the class being constructed.
+ dictionary: The class dictionary of the class being constructed.
+ dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
+ describing this protocol service type.
+ """
+ super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
+ # Don't do anything if this class doesn't have a descriptor. This happens
+ # when a service stub is subclassed.
+ if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary:
+ return
+
+ descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY]
+ service_stub_builder = _ServiceStubBuilder(descriptor)
+ service_stub_builder.BuildServiceStub(cls)
+
+
+class _ServiceBuilder(object):
+
+ """This class constructs a protocol service class using a service descriptor.
+
+  Given a service descriptor, this class constructs a class that implements
+  the described service. One service builder instance constructs exactly one
+  service class, so all instances of that class share the same builder.
+ """
+
+ def __init__(self, service_descriptor):
+ """Initializes an instance of the service class builder.
+
+ Args:
+ service_descriptor: ServiceDescriptor to use when constructing the
+ service class.
+ """
+ self.descriptor = service_descriptor
+
+ def BuildService(builder, cls):
+ """Constructs the service class.
+
+ Args:
+ cls: The class that will be constructed.
+ """
+
+    # CallMethod needs to operate with an instance of the Service class. This
+    # internal wrapper function exists only to be able to pass the service
+    # instance to the method that does the real CallMethod work. It uses the
+    # exact argument names from the abstract interface in service.py so that
+    # the type signatures match.
+ def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done):
+ return builder._CallMethod(self, method_descriptor, rpc_controller,
+ request, done)
+
+ def _WrapGetRequestClass(self, method_descriptor):
+ return builder._GetRequestClass(method_descriptor)
+
+ def _WrapGetResponseClass(self, method_descriptor):
+ return builder._GetResponseClass(method_descriptor)
+
+ builder.cls = cls
+ cls.CallMethod = _WrapCallMethod
+ cls.GetDescriptor = staticmethod(lambda: builder.descriptor)
+ cls.GetDescriptor.__doc__ = 'Returns the service descriptor.'
+ cls.GetRequestClass = _WrapGetRequestClass
+ cls.GetResponseClass = _WrapGetResponseClass
+ for method in builder.descriptor.methods:
+ setattr(cls, method.name, builder._GenerateNonImplementedMethod(method))
+
+ def _CallMethod(self, srvc, method_descriptor,
+ rpc_controller, request, callback):
+ """Calls the method described by a given method descriptor.
+
+ Args:
+ srvc: Instance of the service for which this method is called.
+      method_descriptor: Descriptor that represents the method to call.
+ rpc_controller: RPC controller to use for this method's execution.
+ request: Request protocol message.
+ callback: A callback to invoke after the method has completed.
+ """
+ if method_descriptor.containing_service != self.descriptor:
+ raise RuntimeError(
+ 'CallMethod() given method descriptor for wrong service type.')
+ method = getattr(srvc, method_descriptor.name)
+ return method(rpc_controller, request, callback)
+
+ def _GetRequestClass(self, method_descriptor):
+ """Returns the class of the request protocol message.
+
+ Args:
+ method_descriptor: Descriptor of the method for which to return the
+ request protocol message class.
+
+ Returns:
+ A class that represents the input protocol message of the specified
+ method.
+ """
+ if method_descriptor.containing_service != self.descriptor:
+ raise RuntimeError(
+ 'GetRequestClass() given method descriptor for wrong service type.')
+ return method_descriptor.input_type._concrete_class
+
+ def _GetResponseClass(self, method_descriptor):
+ """Returns the class of the response protocol message.
+
+ Args:
+ method_descriptor: Descriptor of the method for which to return the
+ response protocol message class.
+
+ Returns:
+ A class that represents the output protocol message of the specified
+ method.
+ """
+ if method_descriptor.containing_service != self.descriptor:
+ raise RuntimeError(
+ 'GetResponseClass() given method descriptor for wrong service type.')
+ return method_descriptor.output_type._concrete_class
+
+ def _GenerateNonImplementedMethod(self, method):
+ """Generates and returns a method that can be set for a service methods.
+
+ Args:
+ method: Descriptor of the service method for which a method is to be
+ generated.
+
+ Returns:
+ A method that can be added to the service class.
+ """
+ return lambda inst, rpc_controller, request, callback: (
+ self._NonImplementedMethod(method.name, rpc_controller, callback))
+
+ def _NonImplementedMethod(self, method_name, rpc_controller, callback):
+ """The body of all methods in the generated service class.
+
+ Args:
+ method_name: Name of the method being executed.
+ rpc_controller: RPC controller used to execute this method.
+ callback: A callback which will be invoked when the method finishes.
+ """
+ rpc_controller.SetFailed('Method %s not implemented.' % method_name)
+ callback(None)
+
+
+class _ServiceStubBuilder(object):
+
+ """Constructs a protocol service stub class using a service descriptor.
+
+ Given a service descriptor, this class constructs a suitable stub class.
+ A stub is just a type-safe wrapper around an RpcChannel which emulates a
+ local implementation of the service.
+
+  One service stub builder instance constructs exactly one class, so all
+  instances of that class share the same service stub builder.
+ """
+
+ def __init__(self, service_descriptor):
+ """Initializes an instance of the service stub class builder.
+
+ Args:
+ service_descriptor: ServiceDescriptor to use when constructing the
+ stub class.
+ """
+ self.descriptor = service_descriptor
+
+ def BuildServiceStub(self, cls):
+ """Constructs the stub class.
+
+ Args:
+ cls: The class that will be constructed.
+ """
+
+    def _ServiceStubInit(stub, rpc_channel):
+      stub.rpc_channel = rpc_channel
+
+    self.cls = cls
+ cls.__init__ = _ServiceStubInit
+ for method in self.descriptor.methods:
+ setattr(cls, method.name, self._GenerateStubMethod(method))
+
+ def _GenerateStubMethod(self, method):
+ return (lambda inst, rpc_controller, request, callback=None:
+ self._StubMethod(inst, method, rpc_controller, request, callback))
+
+ def _StubMethod(self, stub, method_descriptor,
+ rpc_controller, request, callback):
+ """The body of all service methods in the generated stub class.
+
+ Args:
+ stub: Stub instance.
+ method_descriptor: Descriptor of the invoked method.
+      rpc_controller: RPC controller used to execute the method.
+ request: Request protocol message.
+ callback: A callback to execute when the method finishes.
+ Returns:
+ Response message (in case of blocking call).
+ """
+ return stub.rpc_channel.CallMethod(
+ method_descriptor, rpc_controller, request,
+ method_descriptor.output_type._concrete_class, callback)
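+
+
+# A usage sketch; `my_service_pb2`, MyService and MyMethod are hypothetical
+# generated names, not part of this module:
+#
+#   # Server side: subclass the generated service class (built by
+#   # GeneratedServiceType) and override its methods.
+#   class MyServiceImpl(my_service_pb2.MyService):
+#     def MyMethod(self, rpc_controller, request, done):
+#       method = self.GetDescriptor().FindMethodByName('MyMethod')
+#       response = self.GetResponseClass(method)()
+#       done(response)
+#
+#   # Client side: the stub (built by GeneratedServiceStubType) forwards every
+#   # call to its RpcChannel via _StubMethod above.
+#   stub = my_service_pb2.MyService_Stub(channel)
+#   stub.MyMethod(rpc_controller, request, callback)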
diff --git a/Lib/site-packages/google/protobuf/source_context_pb2.py b/Lib/site-packages/google/protobuf/source_context_pb2.py
new file mode 100644
index 0000000..fa66e4b
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/source_context_pb2.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/source_context.proto
+# Protobuf Python Version: 4.25.2
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\",\n\rSourceContext\x12\x1b\n\tfile_name\x18\x01 \x01(\tR\x08\x66ileNameB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _globals['DESCRIPTOR']._options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _globals['_SOURCECONTEXT']._serialized_start=57
+ _globals['_SOURCECONTEXT']._serialized_end=101
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/protobuf/struct_pb2.py b/Lib/site-packages/google/protobuf/struct_pb2.py
new file mode 100644
index 0000000..c13099a
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/struct_pb2.py
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/struct.proto
+# Protobuf Python Version: 4.25.2
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x98\x01\n\x06Struct\x12;\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntryR\x06\x66ields\x1aQ\n\x0b\x46ieldsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x05value:\x02\x38\x01\"\xb2\x02\n\x05Value\x12;\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00R\tnullValue\x12#\n\x0cnumber_value\x18\x02 \x01(\x01H\x00R\x0bnumberValue\x12#\n\x0cstring_value\x18\x03 \x01(\tH\x00R\x0bstringValue\x12\x1f\n\nbool_value\x18\x04 \x01(\x08H\x00R\tboolValue\x12<\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00R\x0bstructValue\x12;\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00R\tlistValueB\x06\n\x04kind\";\n\tListValue\x12.\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.ValueR\x06values*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _globals['DESCRIPTOR']._options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _globals['_STRUCT_FIELDSENTRY']._options = None
+ _globals['_STRUCT_FIELDSENTRY']._serialized_options = b'8\001'
+ _globals['_NULLVALUE']._serialized_start=574
+ _globals['_NULLVALUE']._serialized_end=601
+ _globals['_STRUCT']._serialized_start=50
+ _globals['_STRUCT']._serialized_end=202
+ _globals['_STRUCT_FIELDSENTRY']._serialized_start=121
+ _globals['_STRUCT_FIELDSENTRY']._serialized_end=202
+ _globals['_VALUE']._serialized_start=205
+ _globals['_VALUE']._serialized_end=511
+ _globals['_LISTVALUE']._serialized_start=513
+ _globals['_LISTVALUE']._serialized_end=572
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/protobuf/symbol_database.py b/Lib/site-packages/google/protobuf/symbol_database.py
new file mode 100644
index 0000000..1941e81
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/symbol_database.py
@@ -0,0 +1,197 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A database of Python protocol buffer generated symbols.
+
+SymbolDatabase is the MessageFactory for messages generated at compile time,
+and makes it easy to create new instances of a registered type, given only the
+type's protocol buffer symbol name.
+
+Example usage::
+
+ db = symbol_database.SymbolDatabase()
+
+ # Register symbols of interest, from one or multiple files.
+ db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
+ db.RegisterMessage(my_proto_pb2.MyMessage)
+ db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
+
+ # The database can be used as a MessageFactory, to generate types based on
+ # their name:
+ types = db.GetMessages(['my_proto.proto'])
+ my_message_instance = types['MyMessage']()
+
+ # The database's underlying descriptor pool can be queried, so it's not
+ # necessary to know a type's filename to be able to generate it:
+ filename = db.pool.FindFileContainingSymbol('MyMessage')
+ my_message_instance = db.GetMessages([filename])['MyMessage']()
+
+ # This functionality is also provided directly via a convenience method:
+ my_message_instance = db.GetSymbol('MyMessage')()
+"""
+
+import warnings
+
+from google.protobuf.internal import api_implementation
+from google.protobuf import descriptor_pool
+from google.protobuf import message_factory
+
+
+class SymbolDatabase():
+ """A database of Python generated symbols."""
+
+  # Cache of registered classes. Note that as a class attribute this cache is
+  # shared by all SymbolDatabase instances.
+  _classes = {}
+
+ def __init__(self, pool=None):
+ """Initializes a new SymbolDatabase."""
+ self.pool = pool or descriptor_pool.DescriptorPool()
+
+ def GetPrototype(self, descriptor):
+ warnings.warn('SymbolDatabase.GetPrototype() is deprecated. Please '
+ 'use message_factory.GetMessageClass() instead. '
+ 'SymbolDatabase.GetPrototype() will be removed soon.')
+ return message_factory.GetMessageClass(descriptor)
+
+ def CreatePrototype(self, descriptor):
+    warnings.warn('Calling CreatePrototype() directly is wrong. Please use '
+                  'message_factory.GetMessageClass() instead. '
+                  'SymbolDatabase.CreatePrototype() will be removed soon.')
+ return message_factory._InternalCreateMessageClass(descriptor)
+
+ def GetMessages(self, files):
+    warnings.warn('SymbolDatabase.GetMessages() is deprecated. Please use '
+                  'message_factory.GetMessageClassesForFiles() instead. '
+                  'SymbolDatabase.GetMessages() will be removed soon.')
+    return message_factory.GetMessageClassesForFiles(files, self.pool)
+
+ def RegisterMessage(self, message):
+ """Registers the given message type in the local database.
+
+ Calls to GetSymbol() and GetMessages() will return messages registered here.
+
+ Args:
+ message: A :class:`google.protobuf.message.Message` subclass (or
+ instance); its descriptor will be registered.
+
+ Returns:
+ The provided message.
+ """
+
+ desc = message.DESCRIPTOR
+ self._classes[desc] = message
+ self.RegisterMessageDescriptor(desc)
+ return message
+
+ def RegisterMessageDescriptor(self, message_descriptor):
+ """Registers the given message descriptor in the local database.
+
+ Args:
+ message_descriptor (Descriptor): the message descriptor to add.
+ """
+ if api_implementation.Type() == 'python':
+ # pylint: disable=protected-access
+ self.pool._AddDescriptor(message_descriptor)
+
+ def RegisterEnumDescriptor(self, enum_descriptor):
+ """Registers the given enum descriptor in the local database.
+
+ Args:
+ enum_descriptor (EnumDescriptor): The enum descriptor to register.
+
+ Returns:
+ EnumDescriptor: The provided descriptor.
+ """
+ if api_implementation.Type() == 'python':
+ # pylint: disable=protected-access
+ self.pool._AddEnumDescriptor(enum_descriptor)
+ return enum_descriptor
+
+ def RegisterServiceDescriptor(self, service_descriptor):
+ """Registers the given service descriptor in the local database.
+
+ Args:
+ service_descriptor (ServiceDescriptor): the service descriptor to
+ register.
+ """
+ if api_implementation.Type() == 'python':
+ # pylint: disable=protected-access
+ self.pool._AddServiceDescriptor(service_descriptor)
+
+ def RegisterFileDescriptor(self, file_descriptor):
+ """Registers the given file descriptor in the local database.
+
+ Args:
+ file_descriptor (FileDescriptor): The file descriptor to register.
+ """
+ if api_implementation.Type() == 'python':
+ # pylint: disable=protected-access
+ self.pool._InternalAddFileDescriptor(file_descriptor)
+
+ def GetSymbol(self, symbol):
+ """Tries to find a symbol in the local database.
+
+    Currently, this method only returns message.Message instances; however, it
+    may be extended in the future to support other symbol types.
+
+ Args:
+ symbol (str): a protocol buffer symbol.
+
+ Returns:
+ A Python class corresponding to the symbol.
+
+ Raises:
+ KeyError: if the symbol could not be found.
+ """
+
+ return self._classes[self.pool.FindMessageTypeByName(symbol)]
+
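+  # Note: this second definition shadows the deprecated GetMessages() above,
+  # so that version's deprecation warning is never actually emitted.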
+ def GetMessages(self, files):
+ # TODO: Fix the differences with MessageFactory.
+ """Gets all registered messages from a specified file.
+
+ Only messages already created and registered will be returned; (this is the
+ case for imported _pb2 modules)
+ But unlike MessageFactory, this version also returns already defined nested
+ messages, but does not register any message extensions.
+
+ Args:
+ files (list[str]): The file names to extract messages from.
+
+ Returns:
+ A dictionary mapping proto names to the message classes.
+
+ Raises:
+ KeyError: if a file could not be found.
+ """
+
+ def _GetAllMessages(desc):
+ """Walk a message Descriptor and recursively yields all message names."""
+ yield desc
+ for msg_desc in desc.nested_types:
+ for nested_desc in _GetAllMessages(msg_desc):
+ yield nested_desc
+
+ result = {}
+ for file_name in files:
+ file_desc = self.pool.FindFileByName(file_name)
+ for msg_desc in file_desc.message_types_by_name.values():
+ for desc in _GetAllMessages(msg_desc):
+ try:
+ result[desc.full_name] = self._classes[desc]
+ except KeyError:
+ # This descriptor has no registered class, skip it.
+ pass
+ return result
+
+
+_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default())
+
+
+def Default():
+ """Returns the default SymbolDatabase."""
+ return _DEFAULT
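+
+
+if __name__ == '__main__':
+  # Illustrative smoke test, not part of the library API. Generated _pb2
+  # modules register their message classes with the default database when
+  # imported, so well-known types can be looked up by full name.
+  from google.protobuf import struct_pb2
+
+  db = Default()
+  assert db.GetSymbol('google.protobuf.Struct') is struct_pb2.Struct
+  print(sorted(db.GetMessages(['google/protobuf/struct.proto'])))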
diff --git a/Lib/site-packages/google/protobuf/testdata/__init__.py b/Lib/site-packages/google/protobuf/testdata/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/google/protobuf/text_encoding.py b/Lib/site-packages/google/protobuf/text_encoding.py
new file mode 100644
index 0000000..d454987
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/text_encoding.py
@@ -0,0 +1,85 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Encoding related utilities."""
+import re
+
+_cescape_chr_to_symbol_map = {}
+_cescape_chr_to_symbol_map[9] = r'\t' # optional escape
+_cescape_chr_to_symbol_map[10] = r'\n' # optional escape
+_cescape_chr_to_symbol_map[13] = r'\r' # optional escape
+_cescape_chr_to_symbol_map[34] = r'\"' # necessary escape
+_cescape_chr_to_symbol_map[39] = r"\'" # optional escape
+_cescape_chr_to_symbol_map[92] = r'\\' # necessary escape
+
+# Lookup table for unicode
+_cescape_unicode_to_str = [chr(i) for i in range(0, 256)]
+for byte, string in _cescape_chr_to_symbol_map.items():
+ _cescape_unicode_to_str[byte] = string
+
+# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32)
+_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] +
+ [chr(i) for i in range(32, 127)] +
+ [r'\%03o' % i for i in range(127, 256)])
+for byte, string in _cescape_chr_to_symbol_map.items():
+ _cescape_byte_to_str[byte] = string
+del byte, string
+
+
+def CEscape(text, as_utf8) -> str:
+ """Escape a bytes string for use in an text protocol buffer.
+
+ Args:
+ text: A byte string to be escaped.
+ as_utf8: Specifies if result may contain non-ASCII characters.
+ In Python 3 this allows unescaped non-ASCII Unicode characters.
+ In Python 2 the return value will be valid UTF-8 rather than only ASCII.
+ Returns:
+ Escaped string (str).
+ """
+ # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not
+  # satisfy our needs; they encode unprintable characters using two-digit hex
+ # escapes whereas our C++ unescaping function allows hex escapes to be any
+ # length. So, "\0011".encode('string_escape') ends up being "\\x011", which
+ # will be decoded in C++ as a single-character string with char code 0x11.
+ text_is_unicode = isinstance(text, str)
+ if as_utf8 and text_is_unicode:
+ # We're already unicode, no processing beyond control char escapes.
+ return text.translate(_cescape_chr_to_symbol_map)
+ ord_ = ord if text_is_unicode else lambda x: x # bytes iterate as ints.
+ if as_utf8:
+ return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text)
+ return ''.join(_cescape_byte_to_str[ord_(c)] for c in text)
+
+
+_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])')
+
+
+def CUnescape(text: str) -> bytes:
+ """Unescape a text string with C-style escape sequences to UTF-8 bytes.
+
+ Args:
+    text: The data to parse, as a str.
+ Returns:
+ A byte string.
+ """
+
+ def ReplaceHex(m):
+    # Only replace the match if the number of leading backslashes is odd, i.e.
+    # the backslash itself is not escaped.
+ if len(m.group(1)) & 1:
+ return m.group(1) + 'x0' + m.group(2)
+ return m.group(0)
+
+ # This is required because the 'string_escape' encoding doesn't
+ # allow single-digit hex escapes (like '\xf').
+ result = _CUNESCAPE_HEX.sub(ReplaceHex, text)
+
+ return (result.encode('utf-8') # Make it bytes to allow decode.
+ .decode('unicode_escape')
+ # Make it bytes again to return the proper type.
+ .encode('raw_unicode_escape'))
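+
+
+if __name__ == '__main__':
+  # Illustrative round trip, not part of the library API: CEscape renders
+  # bytes as text-format escape sequences and CUnescape reverses it.
+  raw = b'\x00hi\n"'
+  escaped = CEscape(raw, as_utf8=False)
+  print(escaped)  # \000hi\n\"
+  assert CUnescape(escaped) == raw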
diff --git a/Lib/site-packages/google/protobuf/text_format.py b/Lib/site-packages/google/protobuf/text_format.py
new file mode 100644
index 0000000..b448f66
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/text_format.py
@@ -0,0 +1,1834 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Contains routines for printing protocol messages in text format.
+
+Simple usage example::
+
+ # Create a proto object and serialize it to a text proto string.
+ message = my_proto_pb2.MyMessage(foo='bar')
+ text_proto = text_format.MessageToString(message)
+
+ # Parse a text proto string.
+ message = text_format.Parse(text_proto, my_proto_pb2.MyMessage())
+"""
+
+__author__ = 'kenton@google.com (Kenton Varda)'
+
+# TODO: Import thread contention leads to test failures.
+import encodings.raw_unicode_escape # pylint: disable=unused-import
+import encodings.unicode_escape # pylint: disable=unused-import
+import io
+import math
+import re
+
+from google.protobuf.internal import decoder
+from google.protobuf.internal import type_checkers
+from google.protobuf import descriptor
+from google.protobuf import text_encoding
+from google.protobuf import unknown_fields
+
+# pylint: disable=g-import-not-at-top
+__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField',
+ 'PrintFieldValue', 'Merge', 'MessageToBytes']
+
+_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(),
+ type_checkers.Int32ValueChecker(),
+ type_checkers.Uint64ValueChecker(),
+ type_checkers.Int64ValueChecker())
+_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE)
+_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE)
+_QUOTES = frozenset(("'", '"'))
+_ANY_FULL_TYPE_NAME = 'google.protobuf.Any'
+_DEBUG_STRING_SILENT_MARKER = '\t '
+
+
+class Error(Exception):
+ """Top-level module error for text_format."""
+
+
+class ParseError(Error):
+ """Thrown in case of text parsing or tokenizing error."""
+
+ def __init__(self, message=None, line=None, column=None):
+ if message is not None and line is not None:
+ loc = str(line)
+ if column is not None:
+ loc += ':{0}'.format(column)
+ message = '{0} : {1}'.format(loc, message)
+ if message is not None:
+ super(ParseError, self).__init__(message)
+ else:
+ super(ParseError, self).__init__()
+ self._line = line
+ self._column = column
+
+ def GetLine(self):
+ return self._line
+
+ def GetColumn(self):
+ return self._column
+
+
+class TextWriter(object):
+
+ def __init__(self, as_utf8):
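+    # Note: as_utf8 is currently unused; output is buffered as str either way.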
+ self._writer = io.StringIO()
+
+ def write(self, val):
+ return self._writer.write(val)
+
+ def close(self):
+ return self._writer.close()
+
+ def getvalue(self):
+ return self._writer.getvalue()
+
+
+def MessageToString(
+ message,
+ as_utf8=False,
+ as_one_line=False,
+ use_short_repeated_primitives=False,
+ pointy_brackets=False,
+ use_index_order=False,
+ float_format=None,
+ double_format=None,
+ use_field_number=False,
+ descriptor_pool=None,
+ indent=0,
+ message_formatter=None,
+ print_unknown_fields=False,
+ force_colon=False) -> str:
+ """Convert protobuf message to text format.
+
+ Double values can be formatted compactly with 15 digits of
+ precision (which is the most that IEEE 754 "double" can guarantee)
+ using double_format='.15g'. To ensure that converting to text and back to a
+ proto will result in an identical value, double_format='.17g' should be used.
+
+ Args:
+ message: The protocol buffers message.
+ as_utf8: Return unescaped Unicode for non-ASCII characters.
+ as_one_line: Don't introduce newlines between fields.
+ use_short_repeated_primitives: Use short repeated format for primitives.
+ pointy_brackets: If True, use angle brackets instead of curly braces for
+ nesting.
+ use_index_order: If True, fields of a proto message will be printed using
+ the order defined in source code instead of the field number, extensions
+ will be printed at the end of the message and their relative order is
+ determined by the extension number. By default, use the field number
+ order.
+    float_format (str): If set, use this to specify float field formatting
+      (per the "Format Specification Mini-Language"); otherwise, the shortest
+      float that has the same value on the wire will be printed. This also
+      affects double fields if double_format is not set but float_format is.
+    double_format (str): If set, use this to specify double field formatting
+      (per the "Format Specification Mini-Language"); if it is not set but
+      float_format is set, use float_format. Otherwise, use ``str()``.
+ use_field_number: If True, print field numbers instead of names.
+ descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
+ indent (int): The initial indent level, in terms of spaces, for pretty
+ print.
+ message_formatter (function(message, indent, as_one_line) -> unicode|None):
+ Custom formatter for selected sub-messages (usually based on message
+ type). Use to pretty print parts of the protobuf for easier diffing.
+ print_unknown_fields: If True, unknown fields will be printed.
+ force_colon: If set, a colon will be added after the field name even if the
+ field is a proto message.
+
+ Returns:
+ str: A string of the text formatted protocol buffer message.
+ """
+ out = TextWriter(as_utf8)
+ printer = _Printer(
+ out,
+ indent,
+ as_utf8,
+ as_one_line,
+ use_short_repeated_primitives,
+ pointy_brackets,
+ use_index_order,
+ float_format,
+ double_format,
+ use_field_number,
+ descriptor_pool,
+ message_formatter,
+ print_unknown_fields=print_unknown_fields,
+ force_colon=force_colon)
+ printer.PrintMessage(message)
+ result = out.getvalue()
+ out.close()
+ if as_one_line:
+ return result.rstrip()
+ return result
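+
+
+# Usage sketch (the printed output shown is indicative):
+#
+#   from google.protobuf import struct_pb2
+#   msg = struct_pb2.Struct()
+#   msg['name'] = 'example'   # Struct supports dict-style assignment.
+#   MessageToString(msg, as_one_line=True)
+#   # -> 'fields { key: "name" value { string_value: "example" } }'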
+
+
+def MessageToBytes(message, **kwargs) -> bytes:
+ """Convert protobuf message to encoded text format. See MessageToString."""
+ text = MessageToString(message, **kwargs)
+ if isinstance(text, bytes):
+ return text
+ codec = 'utf-8' if kwargs.get('as_utf8') else 'ascii'
+ return text.encode(codec)
+
+
+def _IsMapEntry(field):
+ return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
+ field.message_type.has_options and
+ field.message_type.GetOptions().map_entry)
+
+
+def PrintMessage(message,
+ out,
+ indent=0,
+ as_utf8=False,
+ as_one_line=False,
+ use_short_repeated_primitives=False,
+ pointy_brackets=False,
+ use_index_order=False,
+ float_format=None,
+ double_format=None,
+ use_field_number=False,
+ descriptor_pool=None,
+ message_formatter=None,
+ print_unknown_fields=False,
+ force_colon=False):
+ """Convert the message to text format and write it to the out stream.
+
+ Args:
+ message: The Message object to convert to text format.
+ out: A file handle to write the message to.
+ indent: The initial indent level for pretty print.
+ as_utf8: Return unescaped Unicode for non-ASCII characters.
+ as_one_line: Don't introduce newlines between fields.
+ use_short_repeated_primitives: Use short repeated format for primitives.
+ pointy_brackets: If True, use angle brackets instead of curly braces for
+ nesting.
+ use_index_order: If True, print fields of a proto message using the order
+ defined in source code instead of the field number. By default, use the
+ field number order.
+    float_format: If set, use this to specify float field formatting
+      (per the "Format Specification Mini-Language"); otherwise, the shortest
+      float that has the same value on the wire will be printed. This also
+      affects double fields if double_format is not set but float_format is.
+    double_format: If set, use this to specify double field formatting
+      (per the "Format Specification Mini-Language"); if it is not set but
+      float_format is set, use float_format. Otherwise, str() is used.
+ use_field_number: If True, print field numbers instead of names.
+ descriptor_pool: A DescriptorPool used to resolve Any types.
+ message_formatter: A function(message, indent, as_one_line): unicode|None
+ to custom format selected sub-messages (usually based on message type).
+ Use to pretty print parts of the protobuf for easier diffing.
+ print_unknown_fields: If True, unknown fields will be printed.
+ force_colon: If set, a colon will be added after the field name even if
+ the field is a proto message.
+ """
+ printer = _Printer(
+ out=out, indent=indent, as_utf8=as_utf8,
+ as_one_line=as_one_line,
+ use_short_repeated_primitives=use_short_repeated_primitives,
+ pointy_brackets=pointy_brackets,
+ use_index_order=use_index_order,
+ float_format=float_format,
+ double_format=double_format,
+ use_field_number=use_field_number,
+ descriptor_pool=descriptor_pool,
+ message_formatter=message_formatter,
+ print_unknown_fields=print_unknown_fields,
+ force_colon=force_colon)
+ printer.PrintMessage(message)
+
+
+def PrintField(field,
+ value,
+ out,
+ indent=0,
+ as_utf8=False,
+ as_one_line=False,
+ use_short_repeated_primitives=False,
+ pointy_brackets=False,
+ use_index_order=False,
+ float_format=None,
+ double_format=None,
+ message_formatter=None,
+ print_unknown_fields=False,
+ force_colon=False):
+ """Print a single field name/value pair."""
+ printer = _Printer(out, indent, as_utf8, as_one_line,
+ use_short_repeated_primitives, pointy_brackets,
+ use_index_order, float_format, double_format,
+ message_formatter=message_formatter,
+ print_unknown_fields=print_unknown_fields,
+ force_colon=force_colon)
+ printer.PrintField(field, value)
+
+
+def PrintFieldValue(field,
+ value,
+ out,
+ indent=0,
+ as_utf8=False,
+ as_one_line=False,
+ use_short_repeated_primitives=False,
+ pointy_brackets=False,
+ use_index_order=False,
+ float_format=None,
+ double_format=None,
+ message_formatter=None,
+ print_unknown_fields=False,
+ force_colon=False):
+ """Print a single field value (not including name)."""
+ printer = _Printer(out, indent, as_utf8, as_one_line,
+ use_short_repeated_primitives, pointy_brackets,
+ use_index_order, float_format, double_format,
+ message_formatter=message_formatter,
+ print_unknown_fields=print_unknown_fields,
+ force_colon=force_colon)
+ printer.PrintFieldValue(field, value)
+
+
+def _BuildMessageFromTypeName(type_name, descriptor_pool):
+ """Returns a protobuf message instance.
+
+ Args:
+ type_name: Fully-qualified protobuf message type name string.
+ descriptor_pool: DescriptorPool instance.
+
+ Returns:
+    A Message instance of type matching type_name, or None if no Descriptor
+    matching type_name was found.
+ """
+ # pylint: disable=g-import-not-at-top
+ if descriptor_pool is None:
+ from google.protobuf import descriptor_pool as pool_mod
+ descriptor_pool = pool_mod.Default()
+ from google.protobuf import message_factory
+ try:
+ message_descriptor = descriptor_pool.FindMessageTypeByName(type_name)
+ except KeyError:
+ return None
+ message_type = message_factory.GetMessageClass(message_descriptor)
+ return message_type()
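+
+
+# For example, with the default descriptor pool this resolves any imported
+# well-known type (importing struct_pb2 first ensures Struct is registered):
+#
+#   from google.protobuf import struct_pb2
+#   msg = _BuildMessageFromTypeName('google.protobuf.Struct', None)
+#   # msg is an empty Struct instance; an unknown type name returns None.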
+
+
+# These values must match WireType enum in //google/protobuf/wire_format.h.
+WIRETYPE_LENGTH_DELIMITED = 2
+WIRETYPE_START_GROUP = 3
+
+
+class _Printer(object):
+ """Text format printer for protocol message."""
+
+ def __init__(
+ self,
+ out,
+ indent=0,
+ as_utf8=False,
+ as_one_line=False,
+ use_short_repeated_primitives=False,
+ pointy_brackets=False,
+ use_index_order=False,
+ float_format=None,
+ double_format=None,
+ use_field_number=False,
+ descriptor_pool=None,
+ message_formatter=None,
+ print_unknown_fields=False,
+ force_colon=False):
+ """Initialize the Printer.
+
+ Double values can be formatted compactly with 15 digits of precision
+ (which is the most that IEEE 754 "double" can guarantee) using
+ double_format='.15g'. To ensure that converting to text and back to a proto
+ will result in an identical value, double_format='.17g' should be used.
+
+ Args:
+ out: To record the text format result.
+ indent: The initial indent level for pretty print.
+ as_utf8: Return unescaped Unicode for non-ASCII characters.
+ as_one_line: Don't introduce newlines between fields.
+ use_short_repeated_primitives: Use short repeated format for primitives.
+ pointy_brackets: If True, use angle brackets instead of curly braces for
+ nesting.
+ use_index_order: If True, print fields of a proto message using the order
+ defined in source code instead of the field number. By default, use the
+ field number order.
+      float_format: If set, use this to specify float field formatting
+        (per the "Format Specification Mini-Language"); otherwise, the
+        shortest float that has the same value on the wire will be printed.
+        This also affects double fields if double_format is not set but
+        float_format is.
+      double_format: If set, use this to specify double field formatting
+        (per the "Format Specification Mini-Language"); if it is not set but
+        float_format is set, use float_format. Otherwise, str() is used.
+ use_field_number: If True, print field numbers instead of names.
+ descriptor_pool: A DescriptorPool used to resolve Any types.
+ message_formatter: A function(message, indent, as_one_line): unicode|None
+ to custom format selected sub-messages (usually based on message type).
+ Use to pretty print parts of the protobuf for easier diffing.
+ print_unknown_fields: If True, unknown fields will be printed.
+ force_colon: If set, a colon will be added after the field name even if
+ the field is a proto message.
+ """
+ self.out = out
+ self.indent = indent
+ self.as_utf8 = as_utf8
+ self.as_one_line = as_one_line
+ self.use_short_repeated_primitives = use_short_repeated_primitives
+ self.pointy_brackets = pointy_brackets
+ self.use_index_order = use_index_order
+ self.float_format = float_format
+ if double_format is not None:
+ self.double_format = double_format
+ else:
+ self.double_format = float_format
+ self.use_field_number = use_field_number
+ self.descriptor_pool = descriptor_pool
+ self.message_formatter = message_formatter
+ self.print_unknown_fields = print_unknown_fields
+ self.force_colon = force_colon
+
+ def _TryPrintAsAnyMessage(self, message):
+ """Serializes if message is a google.protobuf.Any field."""
+ if '/' not in message.type_url:
+ return False
+ packed_message = _BuildMessageFromTypeName(message.TypeName(),
+ self.descriptor_pool)
+ if packed_message:
+ packed_message.MergeFromString(message.value)
+ colon = ':' if self.force_colon else ''
+ self.out.write('%s[%s]%s ' % (self.indent * ' ', message.type_url, colon))
+ self._PrintMessageFieldValue(packed_message)
+ self.out.write(' ' if self.as_one_line else '\n')
+ return True
+ else:
+ return False
+
+ def _TryCustomFormatMessage(self, message):
+ formatted = self.message_formatter(message, self.indent, self.as_one_line)
+ if formatted is None:
+ return False
+
+ out = self.out
+ out.write(' ' * self.indent)
+ out.write(formatted)
+ out.write(' ' if self.as_one_line else '\n')
+ return True
+
+ def PrintMessage(self, message):
+ """Convert protobuf message to text format.
+
+ Args:
+ message: The protocol buffers message.
+ """
+ if self.message_formatter and self._TryCustomFormatMessage(message):
+ return
+ if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and
+ self._TryPrintAsAnyMessage(message)):
+ return
+ fields = message.ListFields()
+ if self.use_index_order:
+ fields.sort(
+ key=lambda x: x[0].number if x[0].is_extension else x[0].index)
+ for field, value in fields:
+ if _IsMapEntry(field):
+ for key in sorted(value):
+ # This is slow for maps with submessage entries because it copies the
+ # entire tree. Unfortunately this would take significant refactoring
+ # of this file to work around.
+ #
+ # TODO: refactor and optimize if this becomes an issue.
+ entry_submsg = value.GetEntryClass()(key=key, value=value[key])
+ self.PrintField(field, entry_submsg)
+ elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+ if (self.use_short_repeated_primitives
+ and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE
+ and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_STRING):
+ self._PrintShortRepeatedPrimitivesValue(field, value)
+ else:
+ for element in value:
+ self.PrintField(field, element)
+ else:
+ self.PrintField(field, value)
+
+ if self.print_unknown_fields:
+ self._PrintUnknownFields(unknown_fields.UnknownFieldSet(message))
+
+ def _PrintUnknownFields(self, unknown_field_set):
+ """Print unknown fields."""
+ out = self.out
+ for field in unknown_field_set:
+ out.write(' ' * self.indent)
+ out.write(str(field.field_number))
+ if field.wire_type == WIRETYPE_START_GROUP:
+ if self.as_one_line:
+ out.write(' { ')
+ else:
+ out.write(' {\n')
+ self.indent += 2
+
+ self._PrintUnknownFields(field.data)
+
+ if self.as_one_line:
+ out.write('} ')
+ else:
+ self.indent -= 2
+ out.write(' ' * self.indent + '}\n')
+ elif field.wire_type == WIRETYPE_LENGTH_DELIMITED:
+ try:
+ # If this field is parseable as a Message, it is probably
+ # an embedded message.
+ # pylint: disable=protected-access
+ (embedded_unknown_message, pos) = decoder._DecodeUnknownFieldSet(
+ memoryview(field.data), 0, len(field.data))
+ except Exception: # pylint: disable=broad-except
+ pos = 0
+
+ if pos == len(field.data):
+ if self.as_one_line:
+ out.write(' { ')
+ else:
+ out.write(' {\n')
+ self.indent += 2
+
+ self._PrintUnknownFields(embedded_unknown_message)
+
+ if self.as_one_line:
+ out.write('} ')
+ else:
+ self.indent -= 2
+ out.write(' ' * self.indent + '}\n')
+ else:
+ # A string or bytes field. self.as_utf8 may not work.
+ out.write(': \"')
+ out.write(text_encoding.CEscape(field.data, False))
+ out.write('\" ' if self.as_one_line else '\"\n')
+ else:
+ # varint, fixed32, fixed64
+ out.write(': ')
+ out.write(str(field.data))
+ out.write(' ' if self.as_one_line else '\n')
+
+ def _PrintFieldName(self, field):
+ """Print field name."""
+ out = self.out
+ out.write(' ' * self.indent)
+ if self.use_field_number:
+ out.write(str(field.number))
+ else:
+ if field.is_extension:
+ out.write('[')
+ if (field.containing_type.GetOptions().message_set_wire_format and
+ field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
+ field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL):
+ out.write(field.message_type.full_name)
+ else:
+ out.write(field.full_name)
+ out.write(']')
+ elif field.type == descriptor.FieldDescriptor.TYPE_GROUP:
+ # For groups, use the capitalized name.
+ out.write(field.message_type.name)
+ else:
+ out.write(field.name)
+
+ if (self.force_colon or
+ field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE):
+      # The colon is optional in this case, but our cross-language golden files
+      # don't include it. Here, the colon is only included if force_colon is
+      # set to True.
+ out.write(':')
+
+ def PrintField(self, field, value):
+ """Print a single field name/value pair."""
+ self._PrintFieldName(field)
+ self.out.write(' ')
+ self.PrintFieldValue(field, value)
+ self.out.write(' ' if self.as_one_line else '\n')
+
+ def _PrintShortRepeatedPrimitivesValue(self, field, value):
+ """"Prints short repeated primitives value."""
+ # Note: this is called only when value has at least one element.
+ self._PrintFieldName(field)
+ self.out.write(' [')
+ for i in range(len(value) - 1):
+ self.PrintFieldValue(field, value[i])
+ self.out.write(', ')
+ self.PrintFieldValue(field, value[-1])
+ self.out.write(']')
+ self.out.write(' ' if self.as_one_line else '\n')
+
+ def _PrintMessageFieldValue(self, value):
+ if self.pointy_brackets:
+ openb = '<'
+ closeb = '>'
+ else:
+ openb = '{'
+ closeb = '}'
+
+ if self.as_one_line:
+ self.out.write('%s ' % openb)
+ self.PrintMessage(value)
+ self.out.write(closeb)
+ else:
+ self.out.write('%s\n' % openb)
+ self.indent += 2
+ self.PrintMessage(value)
+ self.indent -= 2
+ self.out.write(' ' * self.indent + closeb)
+
+ def PrintFieldValue(self, field, value):
+ """Print a single field value (not including name).
+
+ For repeated fields, the value should be a single element.
+
+ Args:
+ field: The descriptor of the field to be printed.
+ value: The value of the field.
+ """
+ out = self.out
+ if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
+ self._PrintMessageFieldValue(value)
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
+ enum_value = field.enum_type.values_by_number.get(value, None)
+ if enum_value is not None:
+ out.write(enum_value.name)
+ else:
+ out.write(str(value))
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
+ out.write('\"')
+ if isinstance(value, str) and not self.as_utf8:
+ out_value = value.encode('utf-8')
+ else:
+ out_value = value
+ if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
+ # We always need to escape all binary data in TYPE_BYTES fields.
+ out_as_utf8 = False
+ else:
+ out_as_utf8 = self.as_utf8
+ out.write(text_encoding.CEscape(out_value, out_as_utf8))
+ out.write('\"')
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
+ if value:
+ out.write('true')
+ else:
+ out.write('false')
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
+ if self.float_format is not None:
+ out.write('{1:{0}}'.format(self.float_format, value))
+ else:
+ if math.isnan(value):
+ out.write(str(value))
+ else:
+ out.write(str(type_checkers.ToShortestFloat(value)))
+ elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_DOUBLE and
+ self.double_format is not None):
+ out.write('{1:{0}}'.format(self.double_format, value))
+ else:
+ out.write(str(value))
+
+
+def Parse(text,
+ message,
+ allow_unknown_extension=False,
+ allow_field_number=False,
+ descriptor_pool=None,
+ allow_unknown_field=False):
+ """Parses a text representation of a protocol message into a message.
+
+ NOTE: for historical reasons this function does not clear the input
+ message. This is different from what the binary msg.ParseFrom(...) does.
+ If text contains a field already set in message, the value is appended if the
+ field is repeated. Otherwise, an error is raised.
+
+ Example::
+
+ a = MyProto()
+ a.repeated_field.append('test')
+ b = MyProto()
+
+ # Repeated fields are combined
+ text_format.Parse(repr(a), b)
+ text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"]
+
+ # Non-repeated fields cannot be overwritten
+ a.singular_field = 1
+ b.singular_field = 2
+ text_format.Parse(repr(a), b) # ParseError
+
+ # Binary version:
+ b.ParseFromString(a.SerializeToString()) # repeated_field is now "test"
+
+ Caller is responsible for clearing the message as needed.
+
+ Args:
+ text (str): Message text representation.
+ message (Message): A protocol buffer message to merge into.
+ allow_unknown_extension: if True, skip over missing extensions and keep
+ parsing
+ allow_field_number: if True, both field number and field name are allowed.
+ descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
+    allow_unknown_field: if True, skip over unknown fields and keep
+      parsing. Avoid using this option if possible. It may hide some
+      errors (e.g. spelling mistakes in field names).
+
+ Returns:
+ Message: The same message passed as argument.
+
+ Raises:
+ ParseError: On text parsing problems.
+ """
+ return ParseLines(text.split(b'\n' if isinstance(text, bytes) else u'\n'),
+ message,
+ allow_unknown_extension,
+ allow_field_number,
+ descriptor_pool=descriptor_pool,
+ allow_unknown_field=allow_unknown_field)
+
+
+def Merge(text,
+ message,
+ allow_unknown_extension=False,
+ allow_field_number=False,
+ descriptor_pool=None,
+ allow_unknown_field=False):
+ """Parses a text representation of a protocol message into a message.
+
+ Like Parse(), but allows repeated values for a non-repeated field, and uses
+ the last one. This means any non-repeated, top-level fields specified in text
+ replace those in the message.
+
+ Args:
+ text (str): Message text representation.
+ message (Message): A protocol buffer message to merge into.
+ allow_unknown_extension: if True, skip over missing extensions and keep
+ parsing
+ allow_field_number: if True, both field number and field name are allowed.
+ descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
+    allow_unknown_field: if True, skip over unknown fields and keep
+      parsing. Avoid using this option if possible. It may hide some
+      errors (e.g. spelling mistakes in field names).
+
+ Returns:
+ Message: The same message passed as argument.
+
+ Raises:
+ ParseError: On text parsing problems.
+ """
+ return MergeLines(
+ text.split(b'\n' if isinstance(text, bytes) else u'\n'),
+ message,
+ allow_unknown_extension,
+ allow_field_number,
+ descriptor_pool=descriptor_pool,
+ allow_unknown_field=allow_unknown_field)
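+
+
+# A sketch of the difference from Parse(); MyMessage is an illustrative
+# generated type with an int32 field `singular_field`:
+#
+#   msg = my_proto_pb2.MyMessage()
+#   text_format.Merge('singular_field: 1 singular_field: 2', msg)
+#   # msg.singular_field == 2 (the last value wins); Parse() would raise a
+#   # ParseError here because the singular field appears twice.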
+
+
+def ParseLines(lines,
+ message,
+ allow_unknown_extension=False,
+ allow_field_number=False,
+ descriptor_pool=None,
+ allow_unknown_field=False):
+ """Parses a text representation of a protocol message into a message.
+
+ See Parse() for caveats.
+
+ Args:
+ lines: An iterable of lines of a message's text representation.
+ message: A protocol buffer message to merge into.
+ allow_unknown_extension: if True, skip over missing extensions and keep
+ parsing
+ allow_field_number: if True, both field number and field name are allowed.
+ descriptor_pool: A DescriptorPool used to resolve Any types.
+    allow_unknown_field: if True, skip over unknown fields and keep
+      parsing. Avoid using this option if possible. It may hide some
+      errors (e.g. spelling mistakes in field names).
+
+ Returns:
+ The same message passed as argument.
+
+ Raises:
+ ParseError: On text parsing problems.
+ """
+ parser = _Parser(allow_unknown_extension,
+ allow_field_number,
+ descriptor_pool=descriptor_pool,
+ allow_unknown_field=allow_unknown_field)
+ return parser.ParseLines(lines, message)
+
+
+def MergeLines(lines,
+ message,
+ allow_unknown_extension=False,
+ allow_field_number=False,
+ descriptor_pool=None,
+ allow_unknown_field=False):
+ """Parses a text representation of a protocol message into a message.
+
+ See Merge() for more details.
+
+ Args:
+ lines: An iterable of lines of a message's text representation.
+ message: A protocol buffer message to merge into.
+ allow_unknown_extension: if True, skip over missing extensions and keep
+ parsing
+ allow_field_number: if True, both field number and field name are allowed.
+ descriptor_pool: A DescriptorPool used to resolve Any types.
+    allow_unknown_field: if True, skip over unknown fields and keep
+      parsing. Avoid using this option if possible. It may hide some
+      errors (e.g. spelling mistakes in field names).
+
+ Returns:
+ The same message passed as argument.
+
+ Raises:
+ ParseError: On text parsing problems.
+ """
+ parser = _Parser(allow_unknown_extension,
+ allow_field_number,
+ descriptor_pool=descriptor_pool,
+ allow_unknown_field=allow_unknown_field)
+ return parser.MergeLines(lines, message)
+
+
+class _Parser(object):
+ """Text format parser for protocol message."""
+
+ def __init__(self,
+ allow_unknown_extension=False,
+ allow_field_number=False,
+ descriptor_pool=None,
+ allow_unknown_field=False):
+ self.allow_unknown_extension = allow_unknown_extension
+ self.allow_field_number = allow_field_number
+ self.descriptor_pool = descriptor_pool
+ self.allow_unknown_field = allow_unknown_field
+
+ def ParseLines(self, lines, message):
+ """Parses a text representation of a protocol message into a message."""
+ self._allow_multiple_scalars = False
+ self._ParseOrMerge(lines, message)
+ return message
+
+ def MergeLines(self, lines, message):
+ """Merges a text representation of a protocol message into a message."""
+ self._allow_multiple_scalars = True
+ self._ParseOrMerge(lines, message)
+ return message
+
+ def _ParseOrMerge(self, lines, message):
+ """Converts a text representation of a protocol message into a message.
+
+ Args:
+ lines: Lines of a message's text representation.
+ message: A protocol buffer message to merge into.
+
+ Raises:
+ ParseError: On text parsing problems.
+ """
+ # Tokenize expects native str lines.
+ try:
+ str_lines = (
+ line if isinstance(line, str) else line.decode('utf-8')
+ for line in lines)
+ tokenizer = Tokenizer(str_lines)
+ except UnicodeDecodeError as e:
+ raise ParseError from e
+ if message:
+ self.root_type = message.DESCRIPTOR.full_name
+ while not tokenizer.AtEnd():
+ self._MergeField(tokenizer, message)
+
+ def _MergeField(self, tokenizer, message):
+ """Merges a single protocol message field into a message.
+
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+ message: A protocol message to record the data.
+
+ Raises:
+ ParseError: In case of text parsing problems.
+ """
+ message_descriptor = message.DESCRIPTOR
+ if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and
+ tokenizer.TryConsume('[')):
+ type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer)
+ tokenizer.Consume(']')
+ tokenizer.TryConsume(':')
+ self._DetectSilentMarker(tokenizer, message_descriptor.full_name,
+ type_url_prefix + '/' + packed_type_name)
+ if tokenizer.TryConsume('<'):
+ expanded_any_end_token = '>'
+ else:
+ tokenizer.Consume('{')
+ expanded_any_end_token = '}'
+ expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name,
+ self.descriptor_pool)
+ # Direct comparison with None is used instead of implicit bool conversion
+ # to avoid false positives with falsy initial values, e.g. for
+ # google.protobuf.ListValue.
+ if expanded_any_sub_message is None:
+ raise ParseError('Type %s not found in descriptor pool' %
+ packed_type_name)
+ while not tokenizer.TryConsume(expanded_any_end_token):
+ if tokenizer.AtEnd():
+ raise tokenizer.ParseErrorPreviousToken('Expected "%s".' %
+ (expanded_any_end_token,))
+ self._MergeField(tokenizer, expanded_any_sub_message)
+ deterministic = False
+
+ message.Pack(expanded_any_sub_message,
+ type_url_prefix=type_url_prefix,
+ deterministic=deterministic)
+ return
+
+ if tokenizer.TryConsume('['):
+ name = [tokenizer.ConsumeIdentifier()]
+ while tokenizer.TryConsume('.'):
+ name.append(tokenizer.ConsumeIdentifier())
+ name = '.'.join(name)
+
+ if not message_descriptor.is_extendable:
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" does not have extensions.' %
+ message_descriptor.full_name)
+ # pylint: disable=protected-access
+ field = message.Extensions._FindExtensionByName(name)
+ # pylint: enable=protected-access
+ if not field:
+ if self.allow_unknown_extension:
+ field = None
+ else:
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Extension "%s" not registered. '
+ 'Did you import the _pb2 module which defines it? '
+ 'If you are trying to place the extension in the MessageSet '
+ 'field of another message that is in an Any or MessageSet field, '
+ 'that message\'s _pb2 module must be imported as well' % name)
+ elif message_descriptor != field.containing_type:
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Extension "%s" does not extend message type "%s".' %
+ (name, message_descriptor.full_name))
+
+ tokenizer.Consume(']')
+
+ else:
+ name = tokenizer.ConsumeIdentifierOrNumber()
+ if self.allow_field_number and name.isdigit():
+ number = ParseInteger(name, True, True)
+ field = message_descriptor.fields_by_number.get(number, None)
+ if not field and message_descriptor.is_extendable:
+ field = message.Extensions._FindExtensionByNumber(number)
+ else:
+ field = message_descriptor.fields_by_name.get(name, None)
+
+ # Group names are expected to be capitalized as they appear in the
+ # .proto file, which actually matches their type names, not their field
+ # names.
+ if not field:
+ field = message_descriptor.fields_by_name.get(name.lower(), None)
+ if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP:
+ field = None
+
+ if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and
+ field.message_type.name != name):
+ field = None
+
+ if not field and not self.allow_unknown_field:
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" has no field named "%s".' %
+ (message_descriptor.full_name, name))
+
+ if field:
+ if not self._allow_multiple_scalars and field.containing_oneof:
+ # Check if there's a different field set in this oneof.
+ # Note that we ignore the case if the same field was set before, and we
+ # apply _allow_multiple_scalars to non-scalar fields as well.
+ which_oneof = message.WhichOneof(field.containing_oneof.name)
+ if which_oneof is not None and which_oneof != field.name:
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Field "%s" is specified along with field "%s", another member '
+ 'of oneof "%s" for message type "%s".' %
+ (field.name, which_oneof, field.containing_oneof.name,
+ message_descriptor.full_name))
+
+ if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
+ tokenizer.TryConsume(':')
+ self._DetectSilentMarker(tokenizer, message_descriptor.full_name,
+ field.full_name)
+ merger = self._MergeMessageField
+ else:
+ tokenizer.Consume(':')
+ self._DetectSilentMarker(tokenizer, message_descriptor.full_name,
+ field.full_name)
+ merger = self._MergeScalarField
+
+ if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and
+ tokenizer.TryConsume('[')):
+ # Short repeated format, e.g. "foo: [1, 2, 3]"
+ if not tokenizer.TryConsume(']'):
+ while True:
+ merger(tokenizer, message, field)
+ if tokenizer.TryConsume(']'):
+ break
+ tokenizer.Consume(',')
+
+ else:
+ merger(tokenizer, message, field)
+
+ else: # Proto field is unknown.
+ assert (self.allow_unknown_extension or self.allow_unknown_field)
+ self._SkipFieldContents(tokenizer, name, message_descriptor.full_name)
+
+ # For historical reasons, fields may optionally be separated by commas or
+ # semicolons.
+ if not tokenizer.TryConsume(','):
+ tokenizer.TryConsume(';')
+
+ def _LogSilentMarker(self, immediate_message_type, field_name):
+ pass
+
+ def _DetectSilentMarker(self, tokenizer, immediate_message_type, field_name):
+ if tokenizer.contains_silent_marker_before_current_token:
+ self._LogSilentMarker(immediate_message_type, field_name)
+
+ def _ConsumeAnyTypeUrl(self, tokenizer):
+ """Consumes a google.protobuf.Any type URL and returns the type name."""
+ # Consume "type.googleapis.com/".
+ prefix = [tokenizer.ConsumeIdentifier()]
+ tokenizer.Consume('.')
+ prefix.append(tokenizer.ConsumeIdentifier())
+ tokenizer.Consume('.')
+ prefix.append(tokenizer.ConsumeIdentifier())
+ tokenizer.Consume('/')
+ # Consume the fully-qualified type name.
+ name = [tokenizer.ConsumeIdentifier()]
+ while tokenizer.TryConsume('.'):
+ name.append(tokenizer.ConsumeIdentifier())
+ return '.'.join(prefix), '.'.join(name)
+
+ def _MergeMessageField(self, tokenizer, message, field):
+ """Merges a single scalar field into a message.
+
+ Args:
+ tokenizer: A tokenizer to parse the field value.
+ message: The message of which field is a member.
+ field: The descriptor of the field to be merged.
+
+ Raises:
+ ParseError: In case of text parsing problems.
+ """
+ is_map_entry = _IsMapEntry(field)
+
+ if tokenizer.TryConsume('<'):
+ end_token = '>'
+ else:
+ tokenizer.Consume('{')
+ end_token = '}'
+
+ if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+ if field.is_extension:
+ sub_message = message.Extensions[field].add()
+ elif is_map_entry:
+ sub_message = getattr(message, field.name).GetEntryClass()()
+ else:
+ sub_message = getattr(message, field.name).add()
+ else:
+ if field.is_extension:
+ if (not self._allow_multiple_scalars and
+ message.HasExtension(field)):
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" should not have multiple "%s" extensions.' %
+ (message.DESCRIPTOR.full_name, field.full_name))
+ sub_message = message.Extensions[field]
+ else:
+ # Also apply _allow_multiple_scalars to message field.
+ # TODO: Change to _allow_singular_overwrites.
+ if (not self._allow_multiple_scalars and
+ message.HasField(field.name)):
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" should not have multiple "%s" fields.' %
+ (message.DESCRIPTOR.full_name, field.name))
+ sub_message = getattr(message, field.name)
+ sub_message.SetInParent()
+
+ while not tokenizer.TryConsume(end_token):
+ if tokenizer.AtEnd():
+ raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,))
+ self._MergeField(tokenizer, sub_message)
+
+ if is_map_entry:
+ value_cpptype = field.message_type.fields_by_name['value'].cpp_type
+ if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
+ value = getattr(message, field.name)[sub_message.key]
+ value.CopyFrom(sub_message.value)
+ else:
+ getattr(message, field.name)[sub_message.key] = sub_message.value
+
+ def _MergeScalarField(self, tokenizer, message, field):
+ """Merges a single scalar field into a message.
+
+ Args:
+ tokenizer: A tokenizer to parse the field value.
+ message: A protocol message to record the data.
+ field: The descriptor of the field to be merged.
+
+ Raises:
+ ParseError: In case of text parsing problems.
+ RuntimeError: On runtime errors.
+ """
+ _ = self.allow_unknown_extension
+ value = None
+
+ if field.type in (descriptor.FieldDescriptor.TYPE_INT32,
+ descriptor.FieldDescriptor.TYPE_SINT32,
+ descriptor.FieldDescriptor.TYPE_SFIXED32):
+ value = _ConsumeInt32(tokenizer)
+ elif field.type in (descriptor.FieldDescriptor.TYPE_INT64,
+ descriptor.FieldDescriptor.TYPE_SINT64,
+ descriptor.FieldDescriptor.TYPE_SFIXED64):
+ value = _ConsumeInt64(tokenizer)
+ elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32,
+ descriptor.FieldDescriptor.TYPE_FIXED32):
+ value = _ConsumeUint32(tokenizer)
+ elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64,
+ descriptor.FieldDescriptor.TYPE_FIXED64):
+ value = _ConsumeUint64(tokenizer)
+ elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT,
+ descriptor.FieldDescriptor.TYPE_DOUBLE):
+ value = tokenizer.ConsumeFloat()
+ elif field.type == descriptor.FieldDescriptor.TYPE_BOOL:
+ value = tokenizer.ConsumeBool()
+ elif field.type == descriptor.FieldDescriptor.TYPE_STRING:
+ value = tokenizer.ConsumeString()
+ elif field.type == descriptor.FieldDescriptor.TYPE_BYTES:
+ value = tokenizer.ConsumeByteString()
+ elif field.type == descriptor.FieldDescriptor.TYPE_ENUM:
+ value = tokenizer.ConsumeEnum(field)
+ else:
+ raise RuntimeError('Unknown field type %d' % field.type)
+
+ if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+ if field.is_extension:
+ message.Extensions[field].append(value)
+ else:
+ getattr(message, field.name).append(value)
+ else:
+ if field.is_extension:
+ if (not self._allow_multiple_scalars and
+ field.has_presence and
+ message.HasExtension(field)):
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" should not have multiple "%s" extensions.' %
+ (message.DESCRIPTOR.full_name, field.full_name))
+ else:
+ message.Extensions[field] = value
+ else:
+ duplicate_error = False
+ if not self._allow_multiple_scalars:
+ if field.has_presence:
+ duplicate_error = message.HasField(field.name)
+ else:
+            # For a field that doesn't track presence, make a best-effort
+            # duplicate check by comparing the current value to the default.
+ duplicate_error = bool(getattr(message, field.name))
+
+ if duplicate_error:
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" should not have multiple "%s" fields.' %
+ (message.DESCRIPTOR.full_name, field.name))
+ else:
+ setattr(message, field.name, value)
+
+ def _SkipFieldContents(self, tokenizer, field_name, immediate_message_type):
+ """Skips over contents (value or message) of a field.
+
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+ field_name: The field name currently being parsed.
+ immediate_message_type: The type of the message immediately containing
+ the silent marker.
+ """
+ # Try to guess the type of this field.
+ # If this field is not a message, there should be a ":" between the
+ # field name and the field value and also the field value should not
+ # start with "{" or "<" which indicates the beginning of a message body.
+ # If there is no ":" or there is a "{" or "<" after ":", this field has
+ # to be a message or the input is ill-formed.
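+    # For example (illustrative): 'foo: 1' is skipped as a scalar value,
+    # while both 'foo { bar: 1 }' and 'foo: { bar: 1 }' are skipped as
+    # messages.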
+ if tokenizer.TryConsume(
+ ':') and not tokenizer.LookingAt('{') and not tokenizer.LookingAt('<'):
+ self._DetectSilentMarker(tokenizer, immediate_message_type, field_name)
+ if tokenizer.LookingAt('['):
+ self._SkipRepeatedFieldValue(tokenizer)
+ else:
+ self._SkipFieldValue(tokenizer)
+ else:
+ self._DetectSilentMarker(tokenizer, immediate_message_type, field_name)
+ self._SkipFieldMessage(tokenizer, immediate_message_type)
+
+ def _SkipField(self, tokenizer, immediate_message_type):
+ """Skips over a complete field (name and value/message).
+
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+ immediate_message_type: The type of the message immediately containing
+ the silent marker.
+ """
+ field_name = ''
+ if tokenizer.TryConsume('['):
+ # Consume extension or google.protobuf.Any type URL
+ field_name += '[' + tokenizer.ConsumeIdentifier()
+ num_identifiers = 1
+ while tokenizer.TryConsume('.'):
+ field_name += '.' + tokenizer.ConsumeIdentifier()
+ num_identifiers += 1
+ # This is possibly a type URL for an Any message.
+ if num_identifiers == 3 and tokenizer.TryConsume('/'):
+ field_name += '/' + tokenizer.ConsumeIdentifier()
+ while tokenizer.TryConsume('.'):
+ field_name += '.' + tokenizer.ConsumeIdentifier()
+ tokenizer.Consume(']')
+ field_name += ']'
+ else:
+ field_name += tokenizer.ConsumeIdentifierOrNumber()
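+    # At this point field_name reads e.g. 'bar', '[my.ext.field]' or
+    # '[type.googleapis.com/my.pkg.Msg]' (illustrative examples).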
+
+ self._SkipFieldContents(tokenizer, field_name, immediate_message_type)
+
+ # For historical reasons, fields may optionally be separated by commas or
+ # semicolons.
+ if not tokenizer.TryConsume(','):
+ tokenizer.TryConsume(';')
+
+ def _SkipFieldMessage(self, tokenizer, immediate_message_type):
+ """Skips over a field message.
+
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+ immediate_message_type: The type of the message immediately containing
+      the silent marker.
+ """
+ if tokenizer.TryConsume('<'):
+ delimiter = '>'
+ else:
+ tokenizer.Consume('{')
+ delimiter = '}'
+
+ while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'):
+ self._SkipField(tokenizer, immediate_message_type)
+
+ tokenizer.Consume(delimiter)
+
+ def _SkipFieldValue(self, tokenizer):
+ """Skips over a field value.
+
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+
+ Raises:
+ ParseError: In case an invalid field value is found.
+ """
+    if (not tokenizer.TryConsumeByteString() and
+ not tokenizer.TryConsumeIdentifier() and
+ not _TryConsumeInt64(tokenizer) and
+ not _TryConsumeUint64(tokenizer) and
+ not tokenizer.TryConsumeFloat()):
+ raise ParseError('Invalid field value: ' + tokenizer.token)
+
+ def _SkipRepeatedFieldValue(self, tokenizer):
+ """Skips over a repeated field value.
+
+ Args:
+ tokenizer: A tokenizer to parse the field value.
+ """
+ tokenizer.Consume('[')
+ if not tokenizer.LookingAt(']'):
+ self._SkipFieldValue(tokenizer)
+ while tokenizer.TryConsume(','):
+ self._SkipFieldValue(tokenizer)
+ tokenizer.Consume(']')
+
+
+class Tokenizer(object):
+ """Protocol buffer text representation tokenizer.
+
+ This class handles the lower level string parsing by splitting it into
+ meaningful tokens.
+
+ It was directly ported from the Java protocol buffer API.
+ """
+
+ _WHITESPACE = re.compile(r'\s+')
+ _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE)
+ _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE)
+ _TOKEN = re.compile('|'.join([
+ r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier
+ r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number
+ ] + [ # quoted str for each quote mark
+ # Avoid backtracking! https://stackoverflow.com/a/844267
+ r'{qt}[^{qt}\n\\]*((\\.)+[^{qt}\n\\]*)*({qt}|\\?$)'.format(qt=mark)
+ for mark in _QUOTES
+ ]))
+
+ _IDENTIFIER = re.compile(r'[^\d\W]\w*')
+ _IDENTIFIER_OR_NUMBER = re.compile(r'\w+')
+
+ def __init__(self, lines, skip_comments=True):
+ self._position = 0
+ self._line = -1
+ self._column = 0
+ self._token_start = None
+ self.token = ''
+ self._lines = iter(lines)
+ self._current_line = ''
+ self._previous_line = 0
+ self._previous_column = 0
+ self._more_lines = True
+ self._skip_comments = skip_comments
+ self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT
+ or self._WHITESPACE)
+ self.contains_silent_marker_before_current_token = False
+
+ self._SkipWhitespace()
+ self.NextToken()
+
+ def LookingAt(self, token):
+ return self.token == token
+
+ def AtEnd(self):
+ """Checks the end of the text was reached.
+
+ Returns:
+ True iff the end was reached.
+ """
+ return not self.token
+
+ def _PopLine(self):
+ while len(self._current_line) <= self._column:
+ try:
+ self._current_line = next(self._lines)
+ except StopIteration:
+ self._current_line = ''
+ self._more_lines = False
+ return
+ else:
+ self._line += 1
+ self._column = 0
+
+ def _SkipWhitespace(self):
+ while True:
+ self._PopLine()
+ match = self._whitespace_pattern.match(self._current_line, self._column)
+ if not match:
+ break
+ self.contains_silent_marker_before_current_token = match.group(0) == (
+ ' ' + _DEBUG_STRING_SILENT_MARKER)
+ length = len(match.group(0))
+ self._column += length
+
+ def TryConsume(self, token):
+ """Tries to consume a given piece of text.
+
+ Args:
+ token: Text to consume.
+
+ Returns:
+ True iff the text was consumed.
+ """
+ if self.token == token:
+ self.NextToken()
+ return True
+ return False
+
+ def Consume(self, token):
+ """Consumes a piece of text.
+
+ Args:
+ token: Text to consume.
+
+ Raises:
+ ParseError: If the text couldn't be consumed.
+ """
+ if not self.TryConsume(token):
+ raise self.ParseError('Expected "%s".' % token)
+
+ def ConsumeComment(self):
+ result = self.token
+ if not self._COMMENT.match(result):
+ raise self.ParseError('Expected comment.')
+ self.NextToken()
+ return result
+
+ def ConsumeCommentOrTrailingComment(self):
+ """Consumes a comment, returns a 2-tuple (trailing bool, comment str)."""
+
+ # Tokenizer initializes _previous_line and _previous_column to 0. As the
+ # tokenizer starts, it looks like there is a previous token on the line.
+ just_started = self._line == 0 and self._column == 0
+
+ before_parsing = self._previous_line
+ comment = self.ConsumeComment()
+
+    # A trailing comment is a comment on the same line as the previous token.
+ trailing = (self._previous_line == before_parsing
+ and not just_started)
+
+ return trailing, comment
+
+ def TryConsumeIdentifier(self):
+ try:
+ self.ConsumeIdentifier()
+ return True
+ except ParseError:
+ return False
+
+ def ConsumeIdentifier(self):
+ """Consumes protocol message field identifier.
+
+ Returns:
+ Identifier string.
+
+ Raises:
+ ParseError: If an identifier couldn't be consumed.
+ """
+ result = self.token
+ if not self._IDENTIFIER.match(result):
+ raise self.ParseError('Expected identifier.')
+ self.NextToken()
+ return result
+
+ def TryConsumeIdentifierOrNumber(self):
+ try:
+ self.ConsumeIdentifierOrNumber()
+ return True
+ except ParseError:
+ return False
+
+ def ConsumeIdentifierOrNumber(self):
+ """Consumes protocol message field identifier.
+
+ Returns:
+ Identifier string.
+
+ Raises:
+ ParseError: If an identifier couldn't be consumed.
+ """
+ result = self.token
+ if not self._IDENTIFIER_OR_NUMBER.match(result):
+ raise self.ParseError('Expected identifier or number, got %s.' % result)
+ self.NextToken()
+ return result
+
+ def TryConsumeInteger(self):
+ try:
+ self.ConsumeInteger()
+ return True
+ except ParseError:
+ return False
+
+ def ConsumeInteger(self):
+ """Consumes an integer number.
+
+ Returns:
+ The integer parsed.
+
+ Raises:
+ ParseError: If an integer couldn't be consumed.
+ """
+ try:
+ result = _ParseAbstractInteger(self.token)
+ except ValueError as e:
+ raise self.ParseError(str(e))
+ self.NextToken()
+ return result
+
+ def TryConsumeFloat(self):
+ try:
+ self.ConsumeFloat()
+ return True
+ except ParseError:
+ return False
+
+ def ConsumeFloat(self):
+ """Consumes an floating point number.
+
+ Returns:
+ The number parsed.
+
+ Raises:
+ ParseError: If a floating point number couldn't be consumed.
+ """
+ try:
+ result = ParseFloat(self.token)
+ except ValueError as e:
+ raise self.ParseError(str(e))
+ self.NextToken()
+ return result
+
+ def ConsumeBool(self):
+ """Consumes a boolean value.
+
+ Returns:
+ The bool parsed.
+
+ Raises:
+ ParseError: If a boolean value couldn't be consumed.
+ """
+ try:
+ result = ParseBool(self.token)
+ except ValueError as e:
+ raise self.ParseError(str(e))
+ self.NextToken()
+ return result
+
+ def TryConsumeByteString(self):
+ try:
+ self.ConsumeByteString()
+ return True
+ except ParseError:
+ return False
+
+ def ConsumeString(self):
+ """Consumes a string value.
+
+ Returns:
+ The string parsed.
+
+ Raises:
+ ParseError: If a string value couldn't be consumed.
+ """
+ the_bytes = self.ConsumeByteString()
+ try:
+ return str(the_bytes, 'utf-8')
+ except UnicodeDecodeError as e:
+ raise self._StringParseError(e)
+
+ def ConsumeByteString(self):
+ """Consumes a byte array value.
+
+ Returns:
+      The array parsed (as bytes).
+
+ Raises:
+ ParseError: If a byte array value couldn't be consumed.
+ """
+ the_list = [self._ConsumeSingleByteString()]
+ while self.token and self.token[0] in _QUOTES:
+ the_list.append(self._ConsumeSingleByteString())
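+    # Example (illustrative): adjacent quoted tokens are concatenated, so
+    # parsing the text 'foo: "ab" "cd"' produces b'abcd' here.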
+ return b''.join(the_list)
+
+ def _ConsumeSingleByteString(self):
+ """Consume one token of a string literal.
+
+ String literals (whether bytes or text) can come in multiple adjacent
+ tokens which are automatically concatenated, like in C or Python. This
+ method only consumes one token.
+
+ Returns:
+ The token parsed.
+ Raises:
+ ParseError: When the wrong format data is found.
+ """
+ text = self.token
+ if len(text) < 1 or text[0] not in _QUOTES:
+ raise self.ParseError('Expected string but found: %r' % (text,))
+
+ if len(text) < 2 or text[-1] != text[0]:
+ raise self.ParseError('String missing ending quote: %r' % (text,))
+
+ try:
+ result = text_encoding.CUnescape(text[1:-1])
+ except ValueError as e:
+ raise self.ParseError(str(e))
+ self.NextToken()
+ return result
+
+ def ConsumeEnum(self, field):
+ try:
+ result = ParseEnum(field, self.token)
+ except ValueError as e:
+ raise self.ParseError(str(e))
+ self.NextToken()
+ return result
+
+ def ParseErrorPreviousToken(self, message):
+ """Creates and *returns* a ParseError for the previously read token.
+
+ Args:
+ message: A message to set for the exception.
+
+ Returns:
+ A ParseError instance.
+ """
+ return ParseError(message, self._previous_line + 1,
+ self._previous_column + 1)
+
+ def ParseError(self, message):
+ """Creates and *returns* a ParseError for the current token."""
+ return ParseError('\'' + self._current_line + '\': ' + message,
+ self._line + 1, self._column + 1)
+
+ def _StringParseError(self, e):
+ return self.ParseError('Couldn\'t parse string: ' + str(e))
+
+ def NextToken(self):
+ """Reads the next meaningful token."""
+ self._previous_line = self._line
+ self._previous_column = self._column
+ self.contains_silent_marker_before_current_token = False
+
+ self._column += len(self.token)
+ self._SkipWhitespace()
+
+ if not self._more_lines:
+ self.token = ''
+ return
+
+ match = self._TOKEN.match(self._current_line, self._column)
+ if not match and not self._skip_comments:
+ match = self._COMMENT.match(self._current_line, self._column)
+ if match:
+ token = match.group(0)
+ self.token = token
+ else:
+ self.token = self._current_line[self._column]
+
+# Aliased so it can still be accessed by current visibility violators.
+# TODO: Migrate violators to textformat_tokenizer.
+_Tokenizer = Tokenizer # pylint: disable=invalid-name
+
+
+def _ConsumeInt32(tokenizer):
+ """Consumes a signed 32bit integer number from tokenizer.
+
+ Args:
+ tokenizer: A tokenizer used to parse the number.
+
+ Returns:
+ The integer parsed.
+
+ Raises:
+ ParseError: If a signed 32bit integer couldn't be consumed.
+ """
+ return _ConsumeInteger(tokenizer, is_signed=True, is_long=False)
+
+
+def _ConsumeUint32(tokenizer):
+ """Consumes an unsigned 32bit integer number from tokenizer.
+
+ Args:
+ tokenizer: A tokenizer used to parse the number.
+
+ Returns:
+ The integer parsed.
+
+ Raises:
+ ParseError: If an unsigned 32bit integer couldn't be consumed.
+ """
+ return _ConsumeInteger(tokenizer, is_signed=False, is_long=False)
+
+
+def _TryConsumeInt64(tokenizer):
+ try:
+ _ConsumeInt64(tokenizer)
+ return True
+ except ParseError:
+ return False
+
+
+def _ConsumeInt64(tokenizer):
+ """Consumes a signed 32bit integer number from tokenizer.
+
+ Args:
+ tokenizer: A tokenizer used to parse the number.
+
+ Returns:
+ The integer parsed.
+
+ Raises:
+    ParseError: If a signed 64bit integer couldn't be consumed.
+ """
+ return _ConsumeInteger(tokenizer, is_signed=True, is_long=True)
+
+
+def _TryConsumeUint64(tokenizer):
+ try:
+ _ConsumeUint64(tokenizer)
+ return True
+ except ParseError:
+ return False
+
+
+def _ConsumeUint64(tokenizer):
+ """Consumes an unsigned 64bit integer number from tokenizer.
+
+ Args:
+ tokenizer: A tokenizer used to parse the number.
+
+ Returns:
+ The integer parsed.
+
+ Raises:
+ ParseError: If an unsigned 64bit integer couldn't be consumed.
+ """
+ return _ConsumeInteger(tokenizer, is_signed=False, is_long=True)
+
+
+def _ConsumeInteger(tokenizer, is_signed=False, is_long=False):
+ """Consumes an integer number from tokenizer.
+
+ Args:
+ tokenizer: A tokenizer used to parse the number.
+ is_signed: True if a signed integer must be parsed.
+ is_long: True if a long integer must be parsed.
+
+ Returns:
+ The integer parsed.
+
+ Raises:
+ ParseError: If an integer with given characteristics couldn't be consumed.
+ """
+ try:
+ result = ParseInteger(tokenizer.token, is_signed=is_signed, is_long=is_long)
+ except ValueError as e:
+ raise tokenizer.ParseError(str(e))
+ tokenizer.NextToken()
+ return result
+
+
+def ParseInteger(text, is_signed=False, is_long=False):
+ """Parses an integer.
+
+ Args:
+ text: The text to parse.
+ is_signed: True if a signed integer must be parsed.
+ is_long: True if a long integer must be parsed.
+
+ Returns:
+ The integer value.
+
+ Raises:
+    ValueError: If the text is not a valid integer.
+ """
+ # Do the actual parsing. Exception handling is propagated to caller.
+ result = _ParseAbstractInteger(text)
+
+ # Check if the integer is sane. Exceptions handled by callers.
+ checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)]
+ checker.CheckValue(result)
+ return result
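+
+# Illustrative example (assuming the usual _INTEGER_CHECKERS ordering
+# defined earlier in this module, unsigned before signed):
+#   ParseInteger('-1', is_signed=True) == -1   # passes the signed checker
+#   ParseInteger('-1') raises ValueError       # unsigned checker rejects it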
+
+
+def _ParseAbstractInteger(text):
+ """Parses an integer without checking size/signedness.
+
+ Args:
+ text: The text to parse.
+
+ Returns:
+ The integer value.
+
+ Raises:
+    ValueError: If the text is not a valid integer.
+ """
+ # Do the actual parsing. Exception handling is propagated to caller.
+ orig_text = text
+ c_octal_match = re.match(r'(-?)0(\d+)$', text)
+ if c_octal_match:
+ # Python 3 no longer supports 0755 octal syntax without the 'o', so
+ # we always use the '0o' prefix for multi-digit numbers starting with 0.
+ text = c_octal_match.group(1) + '0o' + c_octal_match.group(2)
+ try:
+ return int(text, 0)
+ except ValueError:
+ raise ValueError('Couldn\'t parse integer: %s' % orig_text)
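+
+# Illustrative examples (assumptions, not part of the original source):
+#   _ParseAbstractInteger('0755') == 493   # rewritten to int('0o755', 0)
+#   _ParseAbstractInteger('0x1f') == 31    # handled directly by int(text, 0)
+#   _ParseAbstractInteger('12')   == 12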
+
+
+def ParseFloat(text):
+ """Parse a floating point number.
+
+ Args:
+ text: Text to parse.
+
+ Returns:
+ The number parsed.
+
+ Raises:
+ ValueError: If a floating point number couldn't be parsed.
+ """
+ try:
+ # Assume Python compatible syntax.
+ return float(text)
+ except ValueError:
+ # Check alternative spellings.
+ if _FLOAT_INFINITY.match(text):
+ if text[0] == '-':
+ return float('-inf')
+ else:
+ return float('inf')
+ elif _FLOAT_NAN.match(text):
+ return float('nan')
+ else:
+ # assume '1.0f' format
+ try:
+ return float(text.rstrip('f'))
+ except ValueError:
+ raise ValueError('Couldn\'t parse float: %s' % text)
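+
+# Illustrative examples (assumptions, not part of the original source):
+#   ParseFloat('1.5')  == 1.5   # plain Python float syntax
+#   ParseFloat('1.0f') == 1.0   # trailing 'f' stripped by the fallback
+#   ParseFloat('x') raises ValueError("Couldn't parse float: x")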
+
+
+def ParseBool(text):
+ """Parse a boolean value.
+
+ Args:
+ text: Text to parse.
+
+ Returns:
+    The boolean value parsed.
+
+ Raises:
+ ValueError: If text is not a valid boolean.
+ """
+ if text in ('true', 't', '1', 'True'):
+ return True
+ elif text in ('false', 'f', '0', 'False'):
+ return False
+ else:
+ raise ValueError('Expected "true" or "false".')
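+
+# Illustrative examples (assumptions, not part of the original source):
+#   ParseBool('true') == ParseBool('t') == ParseBool('1') == ParseBool('True')
+#   ParseBool('TRUE') raises ValueError: only the eight spellings checked
+#   above are accepted.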
+
+
+def ParseEnum(field, value):
+ """Parse an enum value.
+
+ The value can be specified by a number (the enum value), or by
+ a string literal (the enum name).
+
+ Args:
+ field: Enum field descriptor.
+ value: String value.
+
+ Returns:
+ Enum value number.
+
+ Raises:
+ ValueError: If the enum value could not be parsed.
+ """
+ enum_descriptor = field.enum_type
+ try:
+ number = int(value, 0)
+ except ValueError:
+ # Identifier.
+ enum_value = enum_descriptor.values_by_name.get(value, None)
+ if enum_value is None:
+ raise ValueError('Enum type "%s" has no value named %s.' %
+ (enum_descriptor.full_name, value))
+ else:
+ if not field.enum_type.is_closed:
+ return number
+ enum_value = enum_descriptor.values_by_number.get(number, None)
+ if enum_value is None:
+ raise ValueError('Enum type "%s" has no value with number %d.' %
+ (enum_descriptor.full_name, number))
+ return enum_value.number
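+
+# Illustrative example (assumption): for a field whose enum type defines
+# FOO = 1, both ParseEnum(field, 'FOO') and ParseEnum(field, '1') return 1.
+# An unrecognized number is returned as-is only for open enums (i.e. when
+# field.enum_type.is_closed is False); otherwise ValueError is raised.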
diff --git a/Lib/site-packages/google/protobuf/timestamp_pb2.py b/Lib/site-packages/google/protobuf/timestamp_pb2.py
new file mode 100644
index 0000000..6dca9cf
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/timestamp_pb2.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/timestamp.proto
+# Protobuf Python Version: 4.25.2
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\";\n\tTimestamp\x12\x18\n\x07seconds\x18\x01 \x01(\x03R\x07seconds\x12\x14\n\x05nanos\x18\x02 \x01(\x05R\x05nanosB\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _globals['DESCRIPTOR']._options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _globals['_TIMESTAMP']._serialized_start=52
+ _globals['_TIMESTAMP']._serialized_end=111
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/protobuf/type_pb2.py b/Lib/site-packages/google/protobuf/type_pb2.py
new file mode 100644
index 0000000..0891f9b
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/type_pb2.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/type.proto
+# Protobuf Python Version: 4.25.2
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
+from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xa7\x02\n\x04Type\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12.\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.FieldR\x06\x66ields\x12\x16\n\x06oneofs\x18\x03 \x03(\tR\x06oneofs\x12\x31\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.OptionR\x07options\x12\x45\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContextR\rsourceContext\x12/\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.SyntaxR\x06syntax\x12\x18\n\x07\x65\x64ition\x18\x07 \x01(\tR\x07\x65\x64ition\"\xb4\x06\n\x05\x46ield\x12/\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.KindR\x04kind\x12\x44\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.CardinalityR\x0b\x63\x61rdinality\x12\x16\n\x06number\x18\x03 \x01(\x05R\x06number\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\x12\x19\n\x08type_url\x18\x06 \x01(\tR\x07typeUrl\x12\x1f\n\x0boneof_index\x18\x07 \x01(\x05R\noneofIndex\x12\x16\n\x06packed\x18\x08 \x01(\x08R\x06packed\x12\x31\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.OptionR\x07options\x12\x1b\n\tjson_name\x18\n \x01(\tR\x08jsonName\x12#\n\rdefault_value\x18\x0b \x01(\tR\x0c\x64\x65\x66\x61ultValue\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\x99\x02\n\x04\x45num\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x38\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValueR\tenumvalue\x12\x31\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.OptionR\x07options\x12\x45\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContextR\rsourceContext\x12/\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.SyntaxR\x06syntax\x12\x18\n\x07\x65\x64ition\x18\x06 \x01(\tR\x07\x65\x64ition\"j\n\tEnumValue\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x16\n\x06number\x18\x02 \x01(\x05R\x06number\x12\x31\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.OptionR\x07options\"H\n\x06Option\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12*\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyR\x05value*C\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x12\x13\n\x0fSYNTAX_EDITIONS\x10\x02\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _globals['DESCRIPTOR']._options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _globals['_SYNTAX']._serialized_start=1699
+ _globals['_SYNTAX']._serialized_end=1766
+ _globals['_TYPE']._serialized_start=113
+ _globals['_TYPE']._serialized_end=408
+ _globals['_FIELD']._serialized_start=411
+ _globals['_FIELD']._serialized_end=1231
+ _globals['_FIELD_KIND']._serialized_start=785
+ _globals['_FIELD_KIND']._serialized_end=1113
+ _globals['_FIELD_CARDINALITY']._serialized_start=1115
+ _globals['_FIELD_CARDINALITY']._serialized_end=1231
+ _globals['_ENUM']._serialized_start=1234
+ _globals['_ENUM']._serialized_end=1515
+ _globals['_ENUMVALUE']._serialized_start=1517
+ _globals['_ENUMVALUE']._serialized_end=1623
+ _globals['_OPTION']._serialized_start=1625
+ _globals['_OPTION']._serialized_end=1697
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/protobuf/unknown_fields.py b/Lib/site-packages/google/protobuf/unknown_fields.py
new file mode 100644
index 0000000..9b1e549
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/unknown_fields.py
@@ -0,0 +1,97 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Contains Unknown Fields APIs.
+
+Simple usage example:
+ unknown_field_set = UnknownFieldSet(message)
+ for unknown_field in unknown_field_set:
+ wire_type = unknown_field.wire_type
+ field_number = unknown_field.field_number
+ data = unknown_field.data
+"""
+
+
+from google.protobuf.internal import api_implementation
+
+if api_implementation._c_module is not None: # pylint: disable=protected-access
+ UnknownFieldSet = api_implementation._c_module.UnknownFieldSet # pylint: disable=protected-access
+else:
+ from google.protobuf.internal import decoder # pylint: disable=g-import-not-at-top
+ from google.protobuf.internal import wire_format # pylint: disable=g-import-not-at-top
+
+ class UnknownField:
+ """A parsed unknown field."""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_field_number', '_wire_type', '_data']
+
+ def __init__(self, field_number, wire_type, data):
+ self._field_number = field_number
+ self._wire_type = wire_type
+ self._data = data
+ return
+
+ @property
+ def field_number(self):
+ return self._field_number
+
+ @property
+ def wire_type(self):
+ return self._wire_type
+
+ @property
+ def data(self):
+ return self._data
+
+ class UnknownFieldSet:
+ """UnknownField container."""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_values']
+
+ def __init__(self, msg):
+
+ def InternalAdd(field_number, wire_type, data):
+ unknown_field = UnknownField(field_number, wire_type, data)
+ self._values.append(unknown_field)
+
+ self._values = []
+ msg_des = msg.DESCRIPTOR
+ # pylint: disable=protected-access
+ unknown_fields = msg._unknown_fields
+ if (msg_des.has_options and
+ msg_des.GetOptions().message_set_wire_format):
+ local_decoder = decoder.UnknownMessageSetItemDecoder()
+ for _, buffer in unknown_fields:
+ (field_number, data) = local_decoder(memoryview(buffer))
+ InternalAdd(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED, data)
+ else:
+ for tag_bytes, buffer in unknown_fields:
+ # pylint: disable=protected-access
+ (tag, _) = decoder._DecodeVarint(tag_bytes, 0)
+ field_number, wire_type = wire_format.UnpackTag(tag)
+ if field_number == 0:
+ raise RuntimeError('Field number 0 is illegal.')
+ (data, _) = decoder._DecodeUnknownField(
+ memoryview(buffer), 0, wire_type)
+ InternalAdd(field_number, wire_type, data)
+
+ def __getitem__(self, index):
+ size = len(self._values)
+ if index < 0:
+ index += size
+ if index < 0 or index >= size:
+        raise IndexError('index %d out of range' % index)
+
+ return self._values[index]
+
+ def __len__(self):
+ return len(self._values)
+
+ def __iter__(self):
+ return iter(self._values)
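+
+# Illustrative note (assumption): in the pure-Python fallback above,
+# UnknownFieldSet supports len(), iteration and negative indexing, e.g.
+# unknown_field_set[-1] returns the last unknown field decoded from the
+# message's _unknown_fields buffers.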
diff --git a/Lib/site-packages/google/protobuf/util/__init__.py b/Lib/site-packages/google/protobuf/util/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/google/protobuf/wrappers_pb2.py b/Lib/site-packages/google/protobuf/wrappers_pb2.py
new file mode 100644
index 0000000..21e480c
--- /dev/null
+++ b/Lib/site-packages/google/protobuf/wrappers_pb2.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/wrappers.proto
+# Protobuf Python Version: 4.25.2
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"#\n\x0b\x44oubleValue\x12\x14\n\x05value\x18\x01 \x01(\x01R\x05value\"\"\n\nFloatValue\x12\x14\n\x05value\x18\x01 \x01(\x02R\x05value\"\"\n\nInt64Value\x12\x14\n\x05value\x18\x01 \x01(\x03R\x05value\"#\n\x0bUInt64Value\x12\x14\n\x05value\x18\x01 \x01(\x04R\x05value\"\"\n\nInt32Value\x12\x14\n\x05value\x18\x01 \x01(\x05R\x05value\"#\n\x0bUInt32Value\x12\x14\n\x05value\x18\x01 \x01(\rR\x05value\"!\n\tBoolValue\x12\x14\n\x05value\x18\x01 \x01(\x08R\x05value\"#\n\x0bStringValue\x12\x14\n\x05value\x18\x01 \x01(\tR\x05value\"\"\n\nBytesValue\x12\x14\n\x05value\x18\x01 \x01(\x0cR\x05valueB\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _globals['DESCRIPTOR']._options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _globals['_DOUBLEVALUE']._serialized_start=51
+ _globals['_DOUBLEVALUE']._serialized_end=86
+ _globals['_FLOATVALUE']._serialized_start=88
+ _globals['_FLOATVALUE']._serialized_end=122
+ _globals['_INT64VALUE']._serialized_start=124
+ _globals['_INT64VALUE']._serialized_end=158
+ _globals['_UINT64VALUE']._serialized_start=160
+ _globals['_UINT64VALUE']._serialized_end=195
+ _globals['_INT32VALUE']._serialized_start=197
+ _globals['_INT32VALUE']._serialized_end=231
+ _globals['_UINT32VALUE']._serialized_start=233
+ _globals['_UINT32VALUE']._serialized_end=268
+ _globals['_BOOLVALUE']._serialized_start=270
+ _globals['_BOOLVALUE']._serialized_end=303
+ _globals['_STRINGVALUE']._serialized_start=305
+ _globals['_STRINGVALUE']._serialized_end=340
+ _globals['_BYTESVALUE']._serialized_start=342
+ _globals['_BYTESVALUE']._serialized_end=376
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/rpc/code.proto b/Lib/site-packages/google/rpc/code.proto
new file mode 100644
index 0000000..7c810af
--- /dev/null
+++ b/Lib/site-packages/google/rpc/code.proto
@@ -0,0 +1,186 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.rpc;
+
+option go_package = "google.golang.org/genproto/googleapis/rpc/code;code";
+option java_multiple_files = true;
+option java_outer_classname = "CodeProto";
+option java_package = "com.google.rpc";
+option objc_class_prefix = "RPC";
+
+// The canonical error codes for gRPC APIs.
+//
+//
+// Sometimes multiple error codes may apply. Services should return
+// the most specific error code that applies. For example, prefer
+// `OUT_OF_RANGE` over `FAILED_PRECONDITION` if both codes apply.
+// Similarly prefer `NOT_FOUND` or `ALREADY_EXISTS` over `FAILED_PRECONDITION`.
+enum Code {
+ // Not an error; returned on success.
+ //
+ // HTTP Mapping: 200 OK
+ OK = 0;
+
+ // The operation was cancelled, typically by the caller.
+ //
+ // HTTP Mapping: 499 Client Closed Request
+ CANCELLED = 1;
+
+ // Unknown error. For example, this error may be returned when
+ // a `Status` value received from another address space belongs to
+ // an error space that is not known in this address space. Also
+ // errors raised by APIs that do not return enough error information
+ // may be converted to this error.
+ //
+ // HTTP Mapping: 500 Internal Server Error
+ UNKNOWN = 2;
+
+ // The client specified an invalid argument. Note that this differs
+ // from `FAILED_PRECONDITION`. `INVALID_ARGUMENT` indicates arguments
+ // that are problematic regardless of the state of the system
+ // (e.g., a malformed file name).
+ //
+ // HTTP Mapping: 400 Bad Request
+ INVALID_ARGUMENT = 3;
+
+ // The deadline expired before the operation could complete. For operations
+ // that change the state of the system, this error may be returned
+ // even if the operation has completed successfully. For example, a
+ // successful response from a server could have been delayed long
+ // enough for the deadline to expire.
+ //
+ // HTTP Mapping: 504 Gateway Timeout
+ DEADLINE_EXCEEDED = 4;
+
+ // Some requested entity (e.g., file or directory) was not found.
+ //
+ // Note to server developers: if a request is denied for an entire class
+ // of users, such as gradual feature rollout or undocumented allowlist,
+ // `NOT_FOUND` may be used. If a request is denied for some users within
+ // a class of users, such as user-based access control, `PERMISSION_DENIED`
+ // must be used.
+ //
+ // HTTP Mapping: 404 Not Found
+ NOT_FOUND = 5;
+
+ // The entity that a client attempted to create (e.g., file or directory)
+ // already exists.
+ //
+ // HTTP Mapping: 409 Conflict
+ ALREADY_EXISTS = 6;
+
+ // The caller does not have permission to execute the specified
+ // operation. `PERMISSION_DENIED` must not be used for rejections
+ // caused by exhausting some resource (use `RESOURCE_EXHAUSTED`
+ // instead for those errors). `PERMISSION_DENIED` must not be
+ // used if the caller can not be identified (use `UNAUTHENTICATED`
+ // instead for those errors). This error code does not imply the
+ // request is valid or the requested entity exists or satisfies
+ // other pre-conditions.
+ //
+ // HTTP Mapping: 403 Forbidden
+ PERMISSION_DENIED = 7;
+
+ // The request does not have valid authentication credentials for the
+ // operation.
+ //
+ // HTTP Mapping: 401 Unauthorized
+ UNAUTHENTICATED = 16;
+
+ // Some resource has been exhausted, perhaps a per-user quota, or
+ // perhaps the entire file system is out of space.
+ //
+ // HTTP Mapping: 429 Too Many Requests
+ RESOURCE_EXHAUSTED = 8;
+
+ // The operation was rejected because the system is not in a state
+ // required for the operation's execution. For example, the directory
+ // to be deleted is non-empty, an rmdir operation is applied to
+ // a non-directory, etc.
+ //
+ // Service implementors can use the following guidelines to decide
+ // between `FAILED_PRECONDITION`, `ABORTED`, and `UNAVAILABLE`:
+ // (a) Use `UNAVAILABLE` if the client can retry just the failing call.
+ // (b) Use `ABORTED` if the client should retry at a higher level. For
+ // example, when a client-specified test-and-set fails, indicating the
+ // client should restart a read-modify-write sequence.
+ // (c) Use `FAILED_PRECONDITION` if the client should not retry until
+ // the system state has been explicitly fixed. For example, if an "rmdir"
+ // fails because the directory is non-empty, `FAILED_PRECONDITION`
+ // should be returned since the client should not retry unless
+ // the files are deleted from the directory.
+ //
+ // HTTP Mapping: 400 Bad Request
+ FAILED_PRECONDITION = 9;
+
+ // The operation was aborted, typically due to a concurrency issue such as
+ // a sequencer check failure or transaction abort.
+ //
+ // See the guidelines above for deciding between `FAILED_PRECONDITION`,
+ // `ABORTED`, and `UNAVAILABLE`.
+ //
+ // HTTP Mapping: 409 Conflict
+ ABORTED = 10;
+
+ // The operation was attempted past the valid range. E.g., seeking or
+ // reading past end-of-file.
+ //
+ // Unlike `INVALID_ARGUMENT`, this error indicates a problem that may
+ // be fixed if the system state changes. For example, a 32-bit file
+ // system will generate `INVALID_ARGUMENT` if asked to read at an
+ // offset that is not in the range [0,2^32-1], but it will generate
+ // `OUT_OF_RANGE` if asked to read from an offset past the current
+ // file size.
+ //
+ // There is a fair bit of overlap between `FAILED_PRECONDITION` and
+ // `OUT_OF_RANGE`. We recommend using `OUT_OF_RANGE` (the more specific
+ // error) when it applies so that callers who are iterating through
+ // a space can easily look for an `OUT_OF_RANGE` error to detect when
+ // they are done.
+ //
+ // HTTP Mapping: 400 Bad Request
+ OUT_OF_RANGE = 11;
+
+ // The operation is not implemented or is not supported/enabled in this
+ // service.
+ //
+ // HTTP Mapping: 501 Not Implemented
+ UNIMPLEMENTED = 12;
+
+ // Internal errors. This means that some invariants expected by the
+ // underlying system have been broken. This error code is reserved
+ // for serious errors.
+ //
+ // HTTP Mapping: 500 Internal Server Error
+ INTERNAL = 13;
+
+ // The service is currently unavailable. This is most likely a
+ // transient condition, which can be corrected by retrying with
+ // a backoff. Note that it is not always safe to retry
+ // non-idempotent operations.
+ //
+ // See the guidelines above for deciding between `FAILED_PRECONDITION`,
+ // `ABORTED`, and `UNAVAILABLE`.
+ //
+ // HTTP Mapping: 503 Service Unavailable
+ UNAVAILABLE = 14;
+
+ // Unrecoverable data loss or corruption.
+ //
+ // HTTP Mapping: 500 Internal Server Error
+ DATA_LOSS = 15;
+}
diff --git a/Lib/site-packages/google/rpc/code_pb2.py b/Lib/site-packages/google/rpc/code_pb2.py
new file mode 100644
index 0000000..5952dbf
--- /dev/null
+++ b/Lib/site-packages/google/rpc/code_pb2.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/rpc/code.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b"\n\x15google/rpc/code.proto\x12\ngoogle.rpc*\xb7\x02\n\x04\x43ode\x12\x06\n\x02OK\x10\x00\x12\r\n\tCANCELLED\x10\x01\x12\x0b\n\x07UNKNOWN\x10\x02\x12\x14\n\x10INVALID_ARGUMENT\x10\x03\x12\x15\n\x11\x44\x45\x41\x44LINE_EXCEEDED\x10\x04\x12\r\n\tNOT_FOUND\x10\x05\x12\x12\n\x0e\x41LREADY_EXISTS\x10\x06\x12\x15\n\x11PERMISSION_DENIED\x10\x07\x12\x13\n\x0fUNAUTHENTICATED\x10\x10\x12\x16\n\x12RESOURCE_EXHAUSTED\x10\x08\x12\x17\n\x13\x46\x41ILED_PRECONDITION\x10\t\x12\x0b\n\x07\x41\x42ORTED\x10\n\x12\x10\n\x0cOUT_OF_RANGE\x10\x0b\x12\x11\n\rUNIMPLEMENTED\x10\x0c\x12\x0c\n\x08INTERNAL\x10\r\x12\x0f\n\x0bUNAVAILABLE\x10\x0e\x12\r\n\tDATA_LOSS\x10\x0f\x42X\n\x0e\x63om.google.rpcB\tCodeProtoP\x01Z3google.golang.org/genproto/googleapis/rpc/code;code\xa2\x02\x03RPCb\x06proto3"
+)
+
+_CODE = DESCRIPTOR.enum_types_by_name["Code"]
+Code = enum_type_wrapper.EnumTypeWrapper(_CODE)
+OK = 0
+CANCELLED = 1
+UNKNOWN = 2
+INVALID_ARGUMENT = 3
+DEADLINE_EXCEEDED = 4
+NOT_FOUND = 5
+ALREADY_EXISTS = 6
+PERMISSION_DENIED = 7
+UNAUTHENTICATED = 16
+RESOURCE_EXHAUSTED = 8
+FAILED_PRECONDITION = 9
+ABORTED = 10
+OUT_OF_RANGE = 11
+UNIMPLEMENTED = 12
+INTERNAL = 13
+UNAVAILABLE = 14
+DATA_LOSS = 15
+
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.rpcB\tCodeProtoP\001Z3google.golang.org/genproto/googleapis/rpc/code;code\242\002\003RPC"
+ _CODE._serialized_start = 38
+ _CODE._serialized_end = 349
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/rpc/context/attribute_context.proto b/Lib/site-packages/google/rpc/context/attribute_context.proto
new file mode 100644
index 0000000..ef9242e
--- /dev/null
+++ b/Lib/site-packages/google/rpc/context/attribute_context.proto
@@ -0,0 +1,344 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.rpc.context;
+
+import "google/protobuf/any.proto";
+import "google/protobuf/duration.proto";
+import "google/protobuf/struct.proto";
+import "google/protobuf/timestamp.proto";
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/rpc/context/attribute_context;attribute_context";
+option java_multiple_files = true;
+option java_outer_classname = "AttributeContextProto";
+option java_package = "com.google.rpc.context";
+
+// This message defines the standard attribute vocabulary for Google APIs.
+//
+// An attribute is a piece of metadata that describes an activity on a network
+// service. For example, the size of an HTTP request, or the status code of
+// an HTTP response.
+//
+// Each attribute has a type and a name, which is logically defined as
+// a proto message field in `AttributeContext`. The field type becomes the
+// attribute type, and the field path becomes the attribute name. For example,
+// the attribute `source.ip` maps to field `AttributeContext.source.ip`.
+//
+// This message definition is guaranteed not to have any wire breaking change.
+// So you can use it directly for passing attributes across different systems.
+//
+// NOTE: Different systems may generate different subsets of attributes. Please
+// verify the system specification before relying on an attribute generated by
+// a system.
+message AttributeContext {
+ // This message defines attributes for a node that handles a network request.
+ // The node can be either a service or an application that sends, forwards,
+ // or receives the request. Service peers should fill in
+ // `principal` and `labels` as appropriate.
+ message Peer {
+ // The IP address of the peer.
+ string ip = 1;
+
+ // The network port of the peer.
+ int64 port = 2;
+
+ // The labels associated with the peer.
+    map<string, string> labels = 6;
+
+ // The identity of this peer. Similar to `Request.auth.principal`, but
+ // relative to the peer instead of the request. For example, the
+ // identity associated with a load balancer that forwarded the request.
+ string principal = 7;
+
+ // The CLDR country/region code associated with the above IP address.
+ // If the IP address is private, the `region_code` should reflect the
+ // physical location where this peer is running.
+ string region_code = 8;
+ }
+
+ // This message defines attributes associated with API operations, such as
+ // a network API request. The terminology is based on the conventions used
+ // by Google APIs, Istio, and OpenAPI.
+ message Api {
+ // The API service name. It is a logical identifier for a networked API,
+ // such as "pubsub.googleapis.com". The naming syntax depends on the
+ // API management system being used for handling the request.
+ string service = 1;
+
+ // The API operation name. For gRPC requests, it is the fully qualified API
+ // method name, such as "google.pubsub.v1.Publisher.Publish". For OpenAPI
+ // requests, it is the `operationId`, such as "getPet".
+ string operation = 2;
+
+ // The API protocol used for sending the request, such as "http", "https",
+ // "grpc", or "internal".
+ string protocol = 3;
+
+ // The API version associated with the API operation above, such as "v1" or
+ // "v1alpha1".
+ string version = 4;
+ }
+
+ // This message defines request authentication attributes. Terminology is
+ // based on the JSON Web Token (JWT) standard, but the terms also
+ // correlate to concepts in other standards.
+ message Auth {
+ // The authenticated principal. Reflects the issuer (`iss`) and subject
+ // (`sub`) claims within a JWT. The issuer and subject should be `/`
+ // delimited, with `/` percent-encoded within the subject fragment. For
+ // Google accounts, the principal format is:
+ // "https://accounts.google.com/{id}"
+ string principal = 1;
+
+ // The intended audience(s) for this authentication information. Reflects
+ // the audience (`aud`) claim within a JWT. The audience
+ // value(s) depends on the `issuer`, but typically include one or more of
+ // the following pieces of information:
+ //
+ // * The services intended to receive the credential. For example,
+ // ["https://pubsub.googleapis.com/", "https://storage.googleapis.com/"].
+ // * A set of service-based scopes. For example,
+ // ["https://www.googleapis.com/auth/cloud-platform"].
+ // * The client id of an app, such as the Firebase project id for JWTs
+ // from Firebase Auth.
+ //
+ // Consult the documentation for the credential issuer to determine the
+ // information provided.
+ repeated string audiences = 2;
+
+ // The authorized presenter of the credential. Reflects the optional
+ // Authorized Presenter (`azp`) claim within a JWT or the
+ // OAuth client id. For example, a Google Cloud Platform client id looks
+ // as follows: "123456789012.apps.googleusercontent.com".
+ string presenter = 3;
+
+ // Structured claims presented with the credential. JWTs include
+ // `{key: value}` pairs for standard and private claims. The following
+ // is a subset of the standard required and optional claims that would
+ // typically be presented for a Google-based JWT:
+ //
+ // {'iss': 'accounts.google.com',
+ // 'sub': '113289723416554971153',
+ // 'aud': ['123456789012', 'pubsub.googleapis.com'],
+ // 'azp': '123456789012.apps.googleusercontent.com',
+ // 'email': 'jsmith@example.com',
+ // 'iat': 1353601026,
+ // 'exp': 1353604926}
+ //
+ // SAML assertions are similarly specified, but with an identity provider
+ // dependent structure.
+ google.protobuf.Struct claims = 4;
+
+ // A list of access level resource names that allow resources to be
+ // accessed by authenticated requester. It is part of Secure GCP processing
+ // for the incoming request. An access level string has the format:
+ // "//{api_service_name}/accessPolicies/{policy_id}/accessLevels/{short_name}"
+ //
+ // Example:
+ // "//accesscontextmanager.googleapis.com/accessPolicies/MY_POLICY_ID/accessLevels/MY_LEVEL"
+ repeated string access_levels = 5;
+ }
+
+ // This message defines attributes for an HTTP request. If the actual
+ // request is not an HTTP request, the runtime system should try to map
+ // the actual request to an equivalent HTTP request.
+ message Request {
+ // The unique ID for a request, which can be propagated to downstream
+ // systems. The ID should have low probability of collision
+ // within a single day for a specific service.
+ string id = 1;
+
+ // The HTTP request method, such as `GET`, `POST`.
+ string method = 2;
+
+ // The HTTP request headers. If multiple headers share the same key, they
+ // must be merged according to the HTTP spec. All header keys must be
+ // lowercased, because HTTP header keys are case-insensitive.
+    map<string, string> headers = 3;
+
+ // The HTTP URL path, excluding the query parameters.
+ string path = 4;
+
+ // The HTTP request `Host` header value.
+ string host = 5;
+
+ // The HTTP URL scheme, such as `http` and `https`.
+ string scheme = 6;
+
+ // The HTTP URL query in the format of `name1=value1&name2=value2`, as it
+ // appears in the first line of the HTTP request. No decoding is performed.
+ string query = 7;
+
+ // The timestamp when the `destination` service receives the last byte of
+ // the request.
+ google.protobuf.Timestamp time = 9;
+
+ // The HTTP request size in bytes. If unknown, it must be -1.
+ int64 size = 10;
+
+ // The network protocol used with the request, such as "http/1.1",
+ // "spdy/3", "h2", "h2c", "webrtc", "tcp", "udp", "quic". See
+ // https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids
+ // for details.
+ string protocol = 11;
+
+ // A special parameter for request reason. It is used by security systems
+ // to associate auditing information with a request.
+ string reason = 12;
+
+ // The request authentication. May be absent for unauthenticated requests.
+ // Derived from the HTTP request `Authorization` header or equivalent.
+ Auth auth = 13;
+ }
+
+ // This message defines attributes for a typical network response. It
+ // generally models semantics of an HTTP response.
+ message Response {
+ // The HTTP response status code, such as `200` and `404`.
+ int64 code = 1;
+
+ // The HTTP response size in bytes. If unknown, it must be -1.
+ int64 size = 2;
+
+ // The HTTP response headers. If multiple headers share the same key, they
+ // must be merged according to HTTP spec. All header keys must be
+ // lowercased, because HTTP header keys are case-insensitive.
+    map<string, string> headers = 3;
+
+ // The timestamp when the `destination` service sends the last byte of
+ // the response.
+ google.protobuf.Timestamp time = 4;
+
+ // The amount of time it takes the backend service to fully respond to a
+ // request. Measured from when the destination service starts to send the
+ // request to the backend until when the destination service receives the
+ // complete response from the backend.
+ google.protobuf.Duration backend_latency = 5;
+ }
+
+ // This message defines core attributes for a resource. A resource is an
+ // addressable (named) entity provided by the destination service. For
+ // example, a file stored on a network storage service.
+ message Resource {
+ // The name of the service that this resource belongs to, such as
+ // `pubsub.googleapis.com`. The service may be different from the DNS
+ // hostname that actually serves the request.
+ string service = 1;
+
+ // The stable identifier (name) of a resource on the `service`. A resource
+ // can be logically identified as "//{resource.service}/{resource.name}".
+ // The differences between a resource name and a URI are:
+ //
+ // * Resource name is a logical identifier, independent of network
+ // protocol and API version. For example,
+ // `//pubsub.googleapis.com/projects/123/topics/news-feed`.
+ // * URI often includes protocol and version information, so it can
+ // be used directly by applications. For example,
+ // `https://pubsub.googleapis.com/v1/projects/123/topics/news-feed`.
+ //
+ // See https://cloud.google.com/apis/design/resource_names for details.
+ string name = 2;
+
+ // The type of the resource. The syntax is platform-specific because
+ // different platforms define their resources differently.
+ //
+ // For Google APIs, the type format must be "{service}/{kind}", such as
+ // "pubsub.googleapis.com/Topic".
+ string type = 3;
+
+ // The labels or tags on the resource, such as AWS resource tags and
+ // Kubernetes resource labels.
+ map<string, string> labels = 4;
+
+ // The unique identifier of the resource. A UID is unique in time and
+ // space for this resource within the scope of the service. It is
+ // typically generated by the server on successful creation of a resource
+ // and must not be changed. UID is used to uniquely identify resources
+ // when resource names are reused. This value should be a UUID4.
+ string uid = 5;
+
+ // Annotations is an unstructured key-value map stored with a resource that
+ // may be set by external tools to store and retrieve arbitrary metadata.
+ // They are not queryable and should be preserved when modifying objects.
+ //
+ // More info: https://kubernetes.io/docs/user-guide/annotations
+ map<string, string> annotations = 6;
+
+ // Mutable. The display name set by clients. Must be <= 63 characters.
+ string display_name = 7;
+
+ // Output only. The timestamp when the resource was created. This may
+ // be either the time creation was initiated or when it was completed.
+ google.protobuf.Timestamp create_time = 8;
+
+ // Output only. The timestamp when the resource was last updated. Any
+ // change to the resource made by users must refresh this value.
+ // Changes to a resource made by the service should refresh this value.
+ google.protobuf.Timestamp update_time = 9;
+
+ // Output only. The timestamp when the resource was deleted.
+ // If the resource is not deleted, this must be empty.
+ google.protobuf.Timestamp delete_time = 10;
+
+ // Output only. An opaque value that uniquely identifies a version or
+ // generation of a resource. It can be used to confirm that the client
+ // and server agree on the ordering of a resource being written.
+ string etag = 11;
+
+ // Immutable. The location of the resource. The location encoding is
+ // specific to the service provider, and new encoding may be introduced
+ // as the service evolves.
+ //
+ // For Google Cloud products, the encoding is what is used by Google Cloud
+ // APIs, such as `us-east1`, `aws-us-east-1`, and `azure-eastus2`. The
+ // semantics of `location` is identical to the
+ // `cloud.googleapis.com/location` label used by some Google Cloud APIs.
+ string location = 12;
+ }
+
+ // The origin of a network activity. In a multi-hop network activity,
+ // the origin represents the sender of the first hop. For the first hop,
+ // the `source` and the `origin` must have the same content.
+ Peer origin = 7;
+
+ // The source of a network activity, such as starting a TCP connection.
+ // In a multi-hop network activity, the source represents the sender of the
+ // last hop.
+ Peer source = 1;
+
+ // The destination of a network activity, such as accepting a TCP connection.
+ // In a multi-hop network activity, the destination represents the receiver of
+ // the last hop.
+ Peer destination = 2;
+
+ // Represents a network request, such as an HTTP request.
+ Request request = 3;
+
+ // Represents a network response, such as an HTTP response.
+ Response response = 4;
+
+ // Represents a target resource that is involved with a network activity.
+ // If multiple resources are involved with an activity, this must be the
+ // primary one.
+ Resource resource = 5;
+
+ // Represents an API operation that is involved in a network activity.
+ Api api = 6;
+
+ // Supports extensions for advanced use cases, such as logs and metrics.
+ repeated google.protobuf.Any extensions = 8;
+}
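As a brief orientation (an illustrative sketch, not part of the vendored files): once the generated `attribute_context_pb2` module added just below is importable, an `AttributeContext.Request` might be populated as follows. The ID, path, and header values here are hypothetical.

```python
# Minimal sketch: populating AttributeContext.Request per the field comments.
from google.rpc.context import attribute_context_pb2

ctx = attribute_context_pb2.AttributeContext()
ctx.request.id = "req-0001"                         # hypothetical request ID
ctx.request.method = "GET"
ctx.request.scheme = "https"
ctx.request.host = "pubsub.googleapis.com"
ctx.request.path = "/v1/projects/123/topics/news-feed"
ctx.request.headers["accept"] = "application/json"  # header keys must be lowercase
ctx.request.size = -1                               # unknown size, per the field comment
ctx.request.time.GetCurrentTime()                   # well-known Timestamp helper
```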
diff --git a/Lib/site-packages/google/rpc/context/attribute_context_pb2.py b/Lib/site-packages/google/rpc/context/attribute_context_pb2.py
new file mode 100644
index 0000000..0ab135d
--- /dev/null
+++ b/Lib/site-packages/google/rpc/context/attribute_context_pb2.py
@@ -0,0 +1,223 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/rpc/context/attribute_context.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
+from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
+from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b"\n*google/rpc/context/attribute_context.proto\x12\x12google.rpc.context\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x83\x10\n\x10\x41ttributeContext\x12\x39\n\x06origin\x18\x07 \x01(\x0b\x32).google.rpc.context.AttributeContext.Peer\x12\x39\n\x06source\x18\x01 \x01(\x0b\x32).google.rpc.context.AttributeContext.Peer\x12>\n\x0b\x64\x65stination\x18\x02 \x01(\x0b\x32).google.rpc.context.AttributeContext.Peer\x12=\n\x07request\x18\x03 \x01(\x0b\x32,.google.rpc.context.AttributeContext.Request\x12?\n\x08response\x18\x04 \x01(\x0b\x32-.google.rpc.context.AttributeContext.Response\x12?\n\x08resource\x18\x05 \x01(\x0b\x32-.google.rpc.context.AttributeContext.Resource\x12\x35\n\x03\x61pi\x18\x06 \x01(\x0b\x32(.google.rpc.context.AttributeContext.Api\x12(\n\nextensions\x18\x08 \x03(\x0b\x32\x14.google.protobuf.Any\x1a\xbe\x01\n\x04Peer\x12\n\n\x02ip\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x03\x12\x45\n\x06labels\x18\x06 \x03(\x0b\x32\x35.google.rpc.context.AttributeContext.Peer.LabelsEntry\x12\x11\n\tprincipal\x18\x07 \x01(\t\x12\x13\n\x0bregion_code\x18\x08 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aL\n\x03\x41pi\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x11\n\toperation\x18\x02 \x01(\t\x12\x10\n\x08protocol\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\x1a\x7f\n\x04\x41uth\x12\x11\n\tprincipal\x18\x01 \x01(\t\x12\x11\n\taudiences\x18\x02 \x03(\t\x12\x11\n\tpresenter\x18\x03 \x01(\t\x12'\n\x06\x63laims\x18\x04 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x15\n\raccess_levels\x18\x05 \x03(\t\x1a\xef\x02\n\x07Request\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06method\x18\x02 \x01(\t\x12J\n\x07headers\x18\x03 \x03(\x0b\x32\x39.google.rpc.context.AttributeContext.Request.HeadersEntry\x12\x0c\n\x04path\x18\x04 \x01(\t\x12\x0c\n\x04host\x18\x05 \x01(\t\x12\x0e\n\x06scheme\x18\x06 \x01(\t\x12\r\n\x05query\x18\x07 \x01(\t\x12(\n\x04time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0c\n\x04size\x18\n \x01(\x03\x12\x10\n\x08protocol\x18\x0b \x01(\t\x12\x0e\n\x06reason\x18\x0c \x01(\t\x12\x37\n\x04\x61uth\x18\r \x01(\x0b\x32).google.rpc.context.AttributeContext.Auth\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x81\x02\n\x08Response\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x03\x12\x0c\n\x04size\x18\x02 \x01(\x03\x12K\n\x07headers\x18\x03 \x03(\x0b\x32:.google.rpc.context.AttributeContext.Response.HeadersEntry\x12(\n\x04time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0f\x62\x61\x63kend_latency\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x90\x04\n\x08Resource\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0c\n\x04type\x18\x03 \x01(\t\x12I\n\x06labels\x18\x04 \x03(\x0b\x32\x39.google.rpc.context.AttributeContext.Resource.LabelsEntry\x12\x0b\n\x03uid\x18\x05 \x01(\t\x12S\n\x0b\x61nnotations\x18\x06 \x03(\x0b\x32>.google.rpc.context.AttributeContext.Resource.AnnotationsEntry\x12\x14\n\x0c\x64isplay_name\x18\x07 \x01(\t\x12/\n\x0b\x63reate_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x64\x65lete_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0c\n\x04\x65tag\x18\x0b 
\x01(\t\x12\x10\n\x08location\x18\x0c \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x32\n\x10\x41nnotationsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x8b\x01\n\x16\x63om.google.rpc.contextB\x15\x41ttributeContextProtoP\x01ZUgoogle.golang.org/genproto/googleapis/rpc/context/attribute_context;attribute_context\xf8\x01\x01\x62\x06proto3"
+)
+
+
+_ATTRIBUTECONTEXT = DESCRIPTOR.message_types_by_name["AttributeContext"]
+_ATTRIBUTECONTEXT_PEER = _ATTRIBUTECONTEXT.nested_types_by_name["Peer"]
+_ATTRIBUTECONTEXT_PEER_LABELSENTRY = _ATTRIBUTECONTEXT_PEER.nested_types_by_name[
+ "LabelsEntry"
+]
+_ATTRIBUTECONTEXT_API = _ATTRIBUTECONTEXT.nested_types_by_name["Api"]
+_ATTRIBUTECONTEXT_AUTH = _ATTRIBUTECONTEXT.nested_types_by_name["Auth"]
+_ATTRIBUTECONTEXT_REQUEST = _ATTRIBUTECONTEXT.nested_types_by_name["Request"]
+_ATTRIBUTECONTEXT_REQUEST_HEADERSENTRY = _ATTRIBUTECONTEXT_REQUEST.nested_types_by_name[
+ "HeadersEntry"
+]
+_ATTRIBUTECONTEXT_RESPONSE = _ATTRIBUTECONTEXT.nested_types_by_name["Response"]
+_ATTRIBUTECONTEXT_RESPONSE_HEADERSENTRY = (
+ _ATTRIBUTECONTEXT_RESPONSE.nested_types_by_name["HeadersEntry"]
+)
+_ATTRIBUTECONTEXT_RESOURCE = _ATTRIBUTECONTEXT.nested_types_by_name["Resource"]
+_ATTRIBUTECONTEXT_RESOURCE_LABELSENTRY = (
+ _ATTRIBUTECONTEXT_RESOURCE.nested_types_by_name["LabelsEntry"]
+)
+_ATTRIBUTECONTEXT_RESOURCE_ANNOTATIONSENTRY = (
+ _ATTRIBUTECONTEXT_RESOURCE.nested_types_by_name["AnnotationsEntry"]
+)
+AttributeContext = _reflection.GeneratedProtocolMessageType(
+ "AttributeContext",
+ (_message.Message,),
+ {
+ "Peer": _reflection.GeneratedProtocolMessageType(
+ "Peer",
+ (_message.Message,),
+ {
+ "LabelsEntry": _reflection.GeneratedProtocolMessageType(
+ "LabelsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ATTRIBUTECONTEXT_PEER_LABELSENTRY,
+ "__module__": "google.rpc.context.attribute_context_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.context.AttributeContext.Peer.LabelsEntry)
+ },
+ ),
+ "DESCRIPTOR": _ATTRIBUTECONTEXT_PEER,
+ "__module__": "google.rpc.context.attribute_context_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.context.AttributeContext.Peer)
+ },
+ ),
+ "Api": _reflection.GeneratedProtocolMessageType(
+ "Api",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ATTRIBUTECONTEXT_API,
+ "__module__": "google.rpc.context.attribute_context_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.context.AttributeContext.Api)
+ },
+ ),
+ "Auth": _reflection.GeneratedProtocolMessageType(
+ "Auth",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ATTRIBUTECONTEXT_AUTH,
+ "__module__": "google.rpc.context.attribute_context_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.context.AttributeContext.Auth)
+ },
+ ),
+ "Request": _reflection.GeneratedProtocolMessageType(
+ "Request",
+ (_message.Message,),
+ {
+ "HeadersEntry": _reflection.GeneratedProtocolMessageType(
+ "HeadersEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ATTRIBUTECONTEXT_REQUEST_HEADERSENTRY,
+ "__module__": "google.rpc.context.attribute_context_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.context.AttributeContext.Request.HeadersEntry)
+ },
+ ),
+ "DESCRIPTOR": _ATTRIBUTECONTEXT_REQUEST,
+ "__module__": "google.rpc.context.attribute_context_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.context.AttributeContext.Request)
+ },
+ ),
+ "Response": _reflection.GeneratedProtocolMessageType(
+ "Response",
+ (_message.Message,),
+ {
+ "HeadersEntry": _reflection.GeneratedProtocolMessageType(
+ "HeadersEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ATTRIBUTECONTEXT_RESPONSE_HEADERSENTRY,
+ "__module__": "google.rpc.context.attribute_context_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.context.AttributeContext.Response.HeadersEntry)
+ },
+ ),
+ "DESCRIPTOR": _ATTRIBUTECONTEXT_RESPONSE,
+ "__module__": "google.rpc.context.attribute_context_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.context.AttributeContext.Response)
+ },
+ ),
+ "Resource": _reflection.GeneratedProtocolMessageType(
+ "Resource",
+ (_message.Message,),
+ {
+ "LabelsEntry": _reflection.GeneratedProtocolMessageType(
+ "LabelsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ATTRIBUTECONTEXT_RESOURCE_LABELSENTRY,
+ "__module__": "google.rpc.context.attribute_context_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.context.AttributeContext.Resource.LabelsEntry)
+ },
+ ),
+ "AnnotationsEntry": _reflection.GeneratedProtocolMessageType(
+ "AnnotationsEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ATTRIBUTECONTEXT_RESOURCE_ANNOTATIONSENTRY,
+ "__module__": "google.rpc.context.attribute_context_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.context.AttributeContext.Resource.AnnotationsEntry)
+ },
+ ),
+ "DESCRIPTOR": _ATTRIBUTECONTEXT_RESOURCE,
+ "__module__": "google.rpc.context.attribute_context_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.context.AttributeContext.Resource)
+ },
+ ),
+ "DESCRIPTOR": _ATTRIBUTECONTEXT,
+ "__module__": "google.rpc.context.attribute_context_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.context.AttributeContext)
+ },
+)
+_sym_db.RegisterMessage(AttributeContext)
+_sym_db.RegisterMessage(AttributeContext.Peer)
+_sym_db.RegisterMessage(AttributeContext.Peer.LabelsEntry)
+_sym_db.RegisterMessage(AttributeContext.Api)
+_sym_db.RegisterMessage(AttributeContext.Auth)
+_sym_db.RegisterMessage(AttributeContext.Request)
+_sym_db.RegisterMessage(AttributeContext.Request.HeadersEntry)
+_sym_db.RegisterMessage(AttributeContext.Response)
+_sym_db.RegisterMessage(AttributeContext.Response.HeadersEntry)
+_sym_db.RegisterMessage(AttributeContext.Resource)
+_sym_db.RegisterMessage(AttributeContext.Resource.LabelsEntry)
+_sym_db.RegisterMessage(AttributeContext.Resource.AnnotationsEntry)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\026com.google.rpc.contextB\025AttributeContextProtoP\001ZUgoogle.golang.org/genproto/googleapis/rpc/context/attribute_context;attribute_context\370\001\001"
+ _ATTRIBUTECONTEXT_PEER_LABELSENTRY._options = None
+ _ATTRIBUTECONTEXT_PEER_LABELSENTRY._serialized_options = b"8\001"
+ _ATTRIBUTECONTEXT_REQUEST_HEADERSENTRY._options = None
+ _ATTRIBUTECONTEXT_REQUEST_HEADERSENTRY._serialized_options = b"8\001"
+ _ATTRIBUTECONTEXT_RESPONSE_HEADERSENTRY._options = None
+ _ATTRIBUTECONTEXT_RESPONSE_HEADERSENTRY._serialized_options = b"8\001"
+ _ATTRIBUTECONTEXT_RESOURCE_LABELSENTRY._options = None
+ _ATTRIBUTECONTEXT_RESOURCE_LABELSENTRY._serialized_options = b"8\001"
+ _ATTRIBUTECONTEXT_RESOURCE_ANNOTATIONSENTRY._options = None
+ _ATTRIBUTECONTEXT_RESOURCE_ANNOTATIONSENTRY._serialized_options = b"8\001"
+ _ATTRIBUTECONTEXT._serialized_start = 189
+ _ATTRIBUTECONTEXT._serialized_end = 2240
+ _ATTRIBUTECONTEXT_PEER._serialized_start = 682
+ _ATTRIBUTECONTEXT_PEER._serialized_end = 872
+ _ATTRIBUTECONTEXT_PEER_LABELSENTRY._serialized_start = 827
+ _ATTRIBUTECONTEXT_PEER_LABELSENTRY._serialized_end = 872
+ _ATTRIBUTECONTEXT_API._serialized_start = 874
+ _ATTRIBUTECONTEXT_API._serialized_end = 950
+ _ATTRIBUTECONTEXT_AUTH._serialized_start = 952
+ _ATTRIBUTECONTEXT_AUTH._serialized_end = 1079
+ _ATTRIBUTECONTEXT_REQUEST._serialized_start = 1082
+ _ATTRIBUTECONTEXT_REQUEST._serialized_end = 1449
+ _ATTRIBUTECONTEXT_REQUEST_HEADERSENTRY._serialized_start = 1403
+ _ATTRIBUTECONTEXT_REQUEST_HEADERSENTRY._serialized_end = 1449
+ _ATTRIBUTECONTEXT_RESPONSE._serialized_start = 1452
+ _ATTRIBUTECONTEXT_RESPONSE._serialized_end = 1709
+ _ATTRIBUTECONTEXT_RESPONSE_HEADERSENTRY._serialized_start = 1403
+ _ATTRIBUTECONTEXT_RESPONSE_HEADERSENTRY._serialized_end = 1449
+ _ATTRIBUTECONTEXT_RESOURCE._serialized_start = 1712
+ _ATTRIBUTECONTEXT_RESOURCE._serialized_end = 2240
+ _ATTRIBUTECONTEXT_RESOURCE_LABELSENTRY._serialized_start = 827
+ _ATTRIBUTECONTEXT_RESOURCE_LABELSENTRY._serialized_end = 872
+ _ATTRIBUTECONTEXT_RESOURCE_ANNOTATIONSENTRY._serialized_start = 2190
+ _ATTRIBUTECONTEXT_RESOURCE_ANNOTATIONSENTRY._serialized_end = 2240
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/rpc/context/audit_context.proto b/Lib/site-packages/google/rpc/context/audit_context.proto
new file mode 100644
index 0000000..7b8b705
--- /dev/null
+++ b/Lib/site-packages/google/rpc/context/audit_context.proto
@@ -0,0 +1,49 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.rpc.context;
+
+import "google/protobuf/struct.proto";
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/rpc/context;context";
+option java_multiple_files = true;
+option java_outer_classname = "AuditContextProto";
+option java_package = "com.google.rpc.context";
+
+// `AuditContext` provides information that is needed for audit logging.
+message AuditContext {
+ // Serialized audit log.
+ bytes audit_log = 1;
+
+ // An API request message that is scrubbed based on the method annotation.
+ // This field should only be filled if the audit_log field is present.
+ // Service Control will use this to assemble a complete log for Cloud Audit
+ // Logs and Google internal audit logs.
+ google.protobuf.Struct scrubbed_request = 2;
+
+ // An API response message that is scrubbed based on the method annotation.
+ // This field should only be filled if the audit_log field is present.
+ // Service Control will use this to assemble a complete log for Cloud Audit
+ // Logs and Google internal audit logs.
+ google.protobuf.Struct scrubbed_response = 3;
+
+ // Number of scrubbed response items.
+ int32 scrubbed_response_item_count = 4;
+
+ // Audit resource name which is scrubbed.
+ string target_resource = 5;
+}
diff --git a/Lib/site-packages/google/rpc/context/audit_context_pb2.py b/Lib/site-packages/google/rpc/context/audit_context_pb2.py
new file mode 100644
index 0000000..96ba00f
--- /dev/null
+++ b/Lib/site-packages/google/rpc/context/audit_context_pb2.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/rpc/context/audit_context.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n&google/rpc/context/audit_context.proto\x12\x12google.rpc.context\x1a\x1cgoogle/protobuf/struct.proto"\xc7\x01\n\x0c\x41uditContext\x12\x11\n\taudit_log\x18\x01 \x01(\x0c\x12\x31\n\x10scrubbed_request\x18\x02 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x32\n\x11scrubbed_response\x18\x03 \x01(\x0b\x32\x17.google.protobuf.Struct\x12$\n\x1cscrubbed_response_item_count\x18\x04 \x01(\x05\x12\x17\n\x0ftarget_resource\x18\x05 \x01(\tBk\n\x16\x63om.google.rpc.contextB\x11\x41uditContextProtoP\x01Z9google.golang.org/genproto/googleapis/rpc/context;context\xf8\x01\x01\x62\x06proto3'
+)
+
+
+_AUDITCONTEXT = DESCRIPTOR.message_types_by_name["AuditContext"]
+AuditContext = _reflection.GeneratedProtocolMessageType(
+ "AuditContext",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _AUDITCONTEXT,
+ "__module__": "google.rpc.context.audit_context_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.context.AuditContext)
+ },
+)
+_sym_db.RegisterMessage(AuditContext)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\026com.google.rpc.contextB\021AuditContextProtoP\001Z9google.golang.org/genproto/googleapis/rpc/context;context\370\001\001"
+ _AUDITCONTEXT._serialized_start = 93
+ _AUDITCONTEXT._serialized_end = 292
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/rpc/error_details.proto b/Lib/site-packages/google/rpc/error_details.proto
new file mode 100644
index 0000000..c489e83
--- /dev/null
+++ b/Lib/site-packages/google/rpc/error_details.proto
@@ -0,0 +1,285 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.rpc;
+
+import "google/protobuf/duration.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/rpc/errdetails;errdetails";
+option java_multiple_files = true;
+option java_outer_classname = "ErrorDetailsProto";
+option java_package = "com.google.rpc";
+option objc_class_prefix = "RPC";
+
+// Describes the cause of the error with structured details.
+//
+// Example of an error when contacting the "pubsub.googleapis.com" API when it
+// is not enabled:
+//
+// { "reason": "API_DISABLED"
+// "domain": "googleapis.com"
+// "metadata": {
+// "resource": "projects/123",
+// "service": "pubsub.googleapis.com"
+// }
+// }
+//
+// This response indicates that the pubsub.googleapis.com API is not enabled.
+//
+// Example of an error that is returned when attempting to create a Spanner
+// instance in a region that is out of stock:
+//
+// { "reason": "STOCKOUT"
+// "domain": "spanner.googleapis.com",
+// "metadata": {
+// "availableRegions": "us-central1,us-east2"
+// }
+// }
+message ErrorInfo {
+ // The reason for the error. This is a constant value that identifies the
+ // proximate cause of the error. Error reasons are unique within a particular
+ // domain of errors. This should be at most 63 characters and match a
+ // regular expression of `[A-Z][A-Z0-9_]+[A-Z0-9]`, which represents
+ // UPPER_SNAKE_CASE.
+ string reason = 1;
+
+ // The logical grouping to which the "reason" belongs. The error domain
+ // is typically the registered service name of the tool or product that
+ // generates the error. Example: "pubsub.googleapis.com". If the error is
+ // generated by some common infrastructure, the error domain must be a
+ // globally unique value that identifies the infrastructure. For Google API
+ // infrastructure, the error domain is "googleapis.com".
+ string domain = 2;
+
+ // Additional structured details about this error.
+ //
+ // Keys should match /[a-zA-Z0-9-_]/ and be limited to 64 characters in
+ // length. When identifying the current value of an exceeded limit, the units
+ // should be contained in the key, not the value. For example, if the client
+ // exceeds the number of instances that can be created in a single (batch)
+ // request, return {"instanceLimitPerRequest": "100"} rather than
+ // {"instanceLimit": "100/request"}.
+ map metadata = 3;
+}
+
+// Describes when clients can retry a failed request. Clients could ignore
+// the recommendation here or retry when this information is missing from error
+// responses.
+//
+// It's always recommended that clients use exponential backoff when
+// retrying.
+//
+// Clients should wait until `retry_delay` amount of time has passed since
+// receiving the error response before retrying. If retrying requests also
+// fail, clients should use an exponential backoff scheme to gradually increase
+// the delay between retries based on `retry_delay`, until either a maximum
+// number of retries have been reached or a maximum retry delay cap has been
+// reached.
+message RetryInfo {
+ // Clients should wait at least this long between retrying the same request.
+ google.protobuf.Duration retry_delay = 1;
+}
+
+// Describes additional debugging info.
+message DebugInfo {
+ // The stack trace entries indicating where the error occurred.
+ repeated string stack_entries = 1;
+
+ // Additional debugging information provided by the server.
+ string detail = 2;
+}
+
+// Describes how a quota check failed.
+//
+// For example, if a daily limit was exceeded for the calling project,
+// a service could respond with a QuotaFailure detail containing the project
+// id and the description of the quota limit that was exceeded. If the
+// calling project hasn't enabled the service in the developer console, then
+// a service could respond with the project id and set `service_disabled`
+// to true.
+//
+// Also see RetryInfo and Help types for other details about handling a
+// quota failure.
+message QuotaFailure {
+ // A message type used to describe a single quota violation. For example, a
+ // daily quota or a custom quota that was exceeded.
+ message Violation {
+ // The subject on which the quota check failed.
+ // For example, "clientip:" or "project:".
+ string subject = 1;
+
+ // A description of how the quota check failed. Clients can use this
+ // description to find out more about the quota configuration in the
+ // service's public documentation, or to find the relevant quota limit to
+ // adjust through the developer console.
+ //
+ // For example: "Service disabled" or "Daily Limit for read operations
+ // exceeded".
+ string description = 2;
+ }
+
+ // Describes all quota violations.
+ repeated Violation violations = 1;
+}
+
+// Describes what preconditions have failed.
+//
+// For example, if an RPC failed because it required the Terms of Service to be
+// acknowledged, it could list the terms of service violation in the
+// PreconditionFailure message.
+message PreconditionFailure {
+ // A message type used to describe a single precondition failure.
+ message Violation {
+ // The type of PreconditionFailure. We recommend using a service-specific
+ // enum type to define the supported precondition violation subjects. For
+ // example, "TOS" for "Terms of Service violation".
+ string type = 1;
+
+ // The subject, relative to the type, that failed.
+ // For example, "google.com/cloud" relative to the "TOS" type would indicate
+ // which terms of service is being referenced.
+ string subject = 2;
+
+ // A description of how the precondition failed. Developers can use this
+ // description to understand how to fix the failure.
+ //
+ // For example: "Terms of service not accepted".
+ string description = 3;
+ }
+
+ // Describes all precondition violations.
+ repeated Violation violations = 1;
+}
+
+// Describes violations in a client request. This error type focuses on the
+// syntactic aspects of the request.
+message BadRequest {
+ // A message type used to describe a single bad request field.
+ message FieldViolation {
+ // A path that leads to a field in the request body. The value will be a
+ // sequence of dot-separated identifiers that identify a protocol buffer
+ // field.
+ //
+ // Consider the following:
+ //
+ // message CreateContactRequest {
+ // message EmailAddress {
+ // enum Type {
+ // TYPE_UNSPECIFIED = 0;
+ // HOME = 1;
+ // WORK = 2;
+ // }
+ //
+ // optional string email = 1;
+ // repeated Type type = 2;
+ // }
+ //
+ // string full_name = 1;
+ // repeated EmailAddress email_addresses = 2;
+ // }
+ //
+ // In this example, in proto, `field` could take one of the following values:
+ //
+ // * `full_name` for a violation in the `full_name` value
+ // * `email_addresses[1].email` for a violation in the `email` field of the
+ // first `email_addresses` message
+ // * `email_addresses[3].type[2]` for a violation in the second `type`
+ // value in the third `email_addresses` message.
+ //
+ // In JSON, the same values are represented as:
+ //
+ // * `fullName` for a violation in the `fullName` value
+ // * `emailAddresses[1].email` for a violation in the `email` field of the
+ // first `emailAddresses` message
+ // * `emailAddresses[3].type[2]` for a violation in the second `type`
+ // value in the third `emailAddresses` message.
+ string field = 1;
+
+ // A description of why the request element is bad.
+ string description = 2;
+ }
+
+ // Describes all violations in a client request.
+ repeated FieldViolation field_violations = 1;
+}
+
+// Contains metadata about the request that clients can attach when filing a bug
+// or providing other forms of feedback.
+message RequestInfo {
+ // An opaque string that should only be interpreted by the service generating
+ // it. For example, it can be used to identify requests in the service's logs.
+ string request_id = 1;
+
+ // Any data that was used to serve this request. For example, an encrypted
+ // stack trace that can be sent back to the service provider for debugging.
+ string serving_data = 2;
+}
+
+// Describes the resource that is being accessed.
+message ResourceInfo {
+ // A name for the type of resource being accessed, e.g. "sql table",
+ // "cloud storage bucket", "file", "Google calendar"; or the type URL
+ // of the resource: e.g. "type.googleapis.com/google.pubsub.v1.Topic".
+ string resource_type = 1;
+
+ // The name of the resource being accessed. For example, a shared calendar
+ // name: "example.com_4fghdhgsrgh@group.calendar.google.com", if the current
+ // error is
+ // [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED].
+ string resource_name = 2;
+
+ // The owner of the resource (optional).
+ // For example, "user:" or "project:".
+ string owner = 3;
+
+ // Describes what error is encountered when accessing this resource.
+ // For example, updating a cloud project may require the `writer` permission
+ // on the developer console project.
+ string description = 4;
+}
+
+// Provides links to documentation or for performing an out-of-band action.
+//
+// For example, if a quota check failed with an error indicating the calling
+// project hasn't enabled the accessed service, this can contain a URL pointing
+// directly to the right place in the developer console to flip the bit.
+message Help {
+ // Describes a URL link.
+ message Link {
+ // Describes what the link offers.
+ string description = 1;
+
+ // The URL of the link.
+ string url = 2;
+ }
+
+ // URL(s) pointing to additional information on handling the current error.
+ repeated Link links = 1;
+}
+
+// Provides a localized error message that is safe to return to the user,
+// and which can be attached to an RPC error.
+message LocalizedMessage {
+ // The locale used following the specification defined at
+ // https://www.rfc-editor.org/rfc/bcp/bcp47.txt.
+ // Examples are: "en-US", "fr-CH", "es-MX".
+ string locale = 1;
+
+ // The localized error message in the above locale.
+ string message = 2;
+}
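To tie the messages above together, here is a hedged sketch (not part of the vendored files) that builds an `ErrorInfo` and a `RetryInfo` with the generated `error_details_pb2` module added below, then seeds a client-side exponential backoff from `retry_delay` as the `RetryInfo` comment recommends. The 2-second seed, five attempts, and 60-second cap are illustrative assumptions.

```python
# Hedged sketch: constructing error details and honoring retry_delay.
import time

from google.protobuf.duration_pb2 import Duration
from google.rpc import error_details_pb2

info = error_details_pb2.ErrorInfo(
    reason="API_DISABLED",
    domain="googleapis.com",
    metadata={"resource": "projects/123", "service": "pubsub.googleapis.com"},
)
retry = error_details_pb2.RetryInfo(retry_delay=Duration(seconds=2))

# Exponential backoff seeded from retry_delay, capped at an assumed 60 s.
base = retry.retry_delay.ToTimedelta().total_seconds()
for attempt in range(5):
    time.sleep(min(base * (2 ** attempt), 60.0))
    # ... re-issue the failed request here ...
```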
diff --git a/Lib/site-packages/google/rpc/error_details_pb2.py b/Lib/site-packages/google/rpc/error_details_pb2.py
new file mode 100644
index 0000000..e9f26f5
--- /dev/null
+++ b/Lib/site-packages/google/rpc/error_details_pb2.py
@@ -0,0 +1,250 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/rpc/error_details.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x1egoogle/rpc/error_details.proto\x12\ngoogle.rpc\x1a\x1egoogle/protobuf/duration.proto"\x93\x01\n\tErrorInfo\x12\x0e\n\x06reason\x18\x01 \x01(\t\x12\x0e\n\x06\x64omain\x18\x02 \x01(\t\x12\x35\n\x08metadata\x18\x03 \x03(\x0b\x32#.google.rpc.ErrorInfo.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01";\n\tRetryInfo\x12.\n\x0bretry_delay\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"2\n\tDebugInfo\x12\x15\n\rstack_entries\x18\x01 \x03(\t\x12\x0e\n\x06\x64\x65tail\x18\x02 \x01(\t"y\n\x0cQuotaFailure\x12\x36\n\nviolations\x18\x01 \x03(\x0b\x32".google.rpc.QuotaFailure.Violation\x1a\x31\n\tViolation\x12\x0f\n\x07subject\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t"\x95\x01\n\x13PreconditionFailure\x12=\n\nviolations\x18\x01 \x03(\x0b\x32).google.rpc.PreconditionFailure.Violation\x1a?\n\tViolation\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0f\n\x07subject\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t"\x83\x01\n\nBadRequest\x12?\n\x10\x66ield_violations\x18\x01 \x03(\x0b\x32%.google.rpc.BadRequest.FieldViolation\x1a\x34\n\x0e\x46ieldViolation\x12\r\n\x05\x66ield\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t"7\n\x0bRequestInfo\x12\x12\n\nrequest_id\x18\x01 \x01(\t\x12\x14\n\x0cserving_data\x18\x02 \x01(\t"`\n\x0cResourceInfo\x12\x15\n\rresource_type\x18\x01 \x01(\t\x12\x15\n\rresource_name\x18\x02 \x01(\t\x12\r\n\x05owner\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t"V\n\x04Help\x12$\n\x05links\x18\x01 \x03(\x0b\x32\x15.google.rpc.Help.Link\x1a(\n\x04Link\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t"3\n\x10LocalizedMessage\x12\x0e\n\x06locale\x18\x01 \x01(\t\x12\x0f\n\x07message\x18\x02 \x01(\tBl\n\x0e\x63om.google.rpcB\x11\x45rrorDetailsProtoP\x01Z?google.golang.org/genproto/googleapis/rpc/errdetails;errdetails\xa2\x02\x03RPCb\x06proto3'
+)
+
+
+_ERRORINFO = DESCRIPTOR.message_types_by_name["ErrorInfo"]
+_ERRORINFO_METADATAENTRY = _ERRORINFO.nested_types_by_name["MetadataEntry"]
+_RETRYINFO = DESCRIPTOR.message_types_by_name["RetryInfo"]
+_DEBUGINFO = DESCRIPTOR.message_types_by_name["DebugInfo"]
+_QUOTAFAILURE = DESCRIPTOR.message_types_by_name["QuotaFailure"]
+_QUOTAFAILURE_VIOLATION = _QUOTAFAILURE.nested_types_by_name["Violation"]
+_PRECONDITIONFAILURE = DESCRIPTOR.message_types_by_name["PreconditionFailure"]
+_PRECONDITIONFAILURE_VIOLATION = _PRECONDITIONFAILURE.nested_types_by_name["Violation"]
+_BADREQUEST = DESCRIPTOR.message_types_by_name["BadRequest"]
+_BADREQUEST_FIELDVIOLATION = _BADREQUEST.nested_types_by_name["FieldViolation"]
+_REQUESTINFO = DESCRIPTOR.message_types_by_name["RequestInfo"]
+_RESOURCEINFO = DESCRIPTOR.message_types_by_name["ResourceInfo"]
+_HELP = DESCRIPTOR.message_types_by_name["Help"]
+_HELP_LINK = _HELP.nested_types_by_name["Link"]
+_LOCALIZEDMESSAGE = DESCRIPTOR.message_types_by_name["LocalizedMessage"]
+ErrorInfo = _reflection.GeneratedProtocolMessageType(
+ "ErrorInfo",
+ (_message.Message,),
+ {
+ "MetadataEntry": _reflection.GeneratedProtocolMessageType(
+ "MetadataEntry",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ERRORINFO_METADATAENTRY,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.ErrorInfo.MetadataEntry)
+ },
+ ),
+ "DESCRIPTOR": _ERRORINFO,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.ErrorInfo)
+ },
+)
+_sym_db.RegisterMessage(ErrorInfo)
+_sym_db.RegisterMessage(ErrorInfo.MetadataEntry)
+
+RetryInfo = _reflection.GeneratedProtocolMessageType(
+ "RetryInfo",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _RETRYINFO,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.RetryInfo)
+ },
+)
+_sym_db.RegisterMessage(RetryInfo)
+
+DebugInfo = _reflection.GeneratedProtocolMessageType(
+ "DebugInfo",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DEBUGINFO,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.DebugInfo)
+ },
+)
+_sym_db.RegisterMessage(DebugInfo)
+
+QuotaFailure = _reflection.GeneratedProtocolMessageType(
+ "QuotaFailure",
+ (_message.Message,),
+ {
+ "Violation": _reflection.GeneratedProtocolMessageType(
+ "Violation",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _QUOTAFAILURE_VIOLATION,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.QuotaFailure.Violation)
+ },
+ ),
+ "DESCRIPTOR": _QUOTAFAILURE,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.QuotaFailure)
+ },
+)
+_sym_db.RegisterMessage(QuotaFailure)
+_sym_db.RegisterMessage(QuotaFailure.Violation)
+
+PreconditionFailure = _reflection.GeneratedProtocolMessageType(
+ "PreconditionFailure",
+ (_message.Message,),
+ {
+ "Violation": _reflection.GeneratedProtocolMessageType(
+ "Violation",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _PRECONDITIONFAILURE_VIOLATION,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.PreconditionFailure.Violation)
+ },
+ ),
+ "DESCRIPTOR": _PRECONDITIONFAILURE,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.PreconditionFailure)
+ },
+)
+_sym_db.RegisterMessage(PreconditionFailure)
+_sym_db.RegisterMessage(PreconditionFailure.Violation)
+
+BadRequest = _reflection.GeneratedProtocolMessageType(
+ "BadRequest",
+ (_message.Message,),
+ {
+ "FieldViolation": _reflection.GeneratedProtocolMessageType(
+ "FieldViolation",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _BADREQUEST_FIELDVIOLATION,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.BadRequest.FieldViolation)
+ },
+ ),
+ "DESCRIPTOR": _BADREQUEST,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.BadRequest)
+ },
+)
+_sym_db.RegisterMessage(BadRequest)
+_sym_db.RegisterMessage(BadRequest.FieldViolation)
+
+RequestInfo = _reflection.GeneratedProtocolMessageType(
+ "RequestInfo",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _REQUESTINFO,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.RequestInfo)
+ },
+)
+_sym_db.RegisterMessage(RequestInfo)
+
+ResourceInfo = _reflection.GeneratedProtocolMessageType(
+ "ResourceInfo",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _RESOURCEINFO,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.ResourceInfo)
+ },
+)
+_sym_db.RegisterMessage(ResourceInfo)
+
+Help = _reflection.GeneratedProtocolMessageType(
+ "Help",
+ (_message.Message,),
+ {
+ "Link": _reflection.GeneratedProtocolMessageType(
+ "Link",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _HELP_LINK,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.Help.Link)
+ },
+ ),
+ "DESCRIPTOR": _HELP,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.Help)
+ },
+)
+_sym_db.RegisterMessage(Help)
+_sym_db.RegisterMessage(Help.Link)
+
+LocalizedMessage = _reflection.GeneratedProtocolMessageType(
+ "LocalizedMessage",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LOCALIZEDMESSAGE,
+ "__module__": "google.rpc.error_details_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.LocalizedMessage)
+ },
+)
+_sym_db.RegisterMessage(LocalizedMessage)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.rpcB\021ErrorDetailsProtoP\001Z?google.golang.org/genproto/googleapis/rpc/errdetails;errdetails\242\002\003RPC"
+ _ERRORINFO_METADATAENTRY._options = None
+ _ERRORINFO_METADATAENTRY._serialized_options = b"8\001"
+ _ERRORINFO._serialized_start = 79
+ _ERRORINFO._serialized_end = 226
+ _ERRORINFO_METADATAENTRY._serialized_start = 179
+ _ERRORINFO_METADATAENTRY._serialized_end = 226
+ _RETRYINFO._serialized_start = 228
+ _RETRYINFO._serialized_end = 287
+ _DEBUGINFO._serialized_start = 289
+ _DEBUGINFO._serialized_end = 339
+ _QUOTAFAILURE._serialized_start = 341
+ _QUOTAFAILURE._serialized_end = 462
+ _QUOTAFAILURE_VIOLATION._serialized_start = 413
+ _QUOTAFAILURE_VIOLATION._serialized_end = 462
+ _PRECONDITIONFAILURE._serialized_start = 465
+ _PRECONDITIONFAILURE._serialized_end = 614
+ _PRECONDITIONFAILURE_VIOLATION._serialized_start = 551
+ _PRECONDITIONFAILURE_VIOLATION._serialized_end = 614
+ _BADREQUEST._serialized_start = 617
+ _BADREQUEST._serialized_end = 748
+ _BADREQUEST_FIELDVIOLATION._serialized_start = 696
+ _BADREQUEST_FIELDVIOLATION._serialized_end = 748
+ _REQUESTINFO._serialized_start = 750
+ _REQUESTINFO._serialized_end = 805
+ _RESOURCEINFO._serialized_start = 807
+ _RESOURCEINFO._serialized_end = 903
+ _HELP._serialized_start = 905
+ _HELP._serialized_end = 991
+ _HELP_LINK._serialized_start = 951
+ _HELP_LINK._serialized_end = 991
+ _LOCALIZEDMESSAGE._serialized_start = 993
+ _LOCALIZEDMESSAGE._serialized_end = 1044
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/rpc/http.proto b/Lib/site-packages/google/rpc/http.proto
new file mode 100644
index 0000000..299a71f
--- /dev/null
+++ b/Lib/site-packages/google/rpc/http.proto
@@ -0,0 +1,64 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.rpc;
+
+option go_package = "google.golang.org/genproto/googleapis/rpc/http;http";
+option java_multiple_files = true;
+option java_outer_classname = "HttpProto";
+option java_package = "com.google.rpc";
+option objc_class_prefix = "RPC";
+
+// Represents an HTTP request.
+message HttpRequest {
+ // The HTTP request method.
+ string method = 1;
+
+ // The HTTP request URI.
+ string uri = 2;
+
+ // The HTTP request headers. The ordering of the headers is significant.
+ // Multiple headers with the same key may be present for the request.
+ repeated HttpHeader headers = 3;
+
+ // The HTTP request body. If the body is not expected, it should be empty.
+ bytes body = 4;
+}
+
+// Represents an HTTP response.
+message HttpResponse {
+ // The HTTP status code, such as 200 or 404.
+ int32 status = 1;
+
+ // The HTTP reason phrase, such as "OK" or "Not Found".
+ string reason = 2;
+
+ // The HTTP response headers. The ordering of the headers is significant.
+ // Multiple headers with the same key may be present for the response.
+ repeated HttpHeader headers = 3;
+
+ // The HTTP response body. If the body is not expected, it should be empty.
+ bytes body = 4;
+}
+
+// Represents an HTTP header.
+message HttpHeader {
+ // The HTTP header key. It is case-insensitive.
+ string key = 1;
+
+ // The HTTP header value.
+ string value = 2;
+}
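Note the contrast with `AttributeContext.Request`: headers here are a repeated `HttpHeader` message rather than a map, so ordering and duplicate keys survive. A small sketch against the generated `http_pb2` module added below; the endpoint and payload are made up.

```python
# Hedged sketch: repeated HttpHeader entries preserve order and duplicates.
from google.rpc import http_pb2

req = http_pb2.HttpRequest(
    method="POST",
    uri="https://example.com/upload",  # hypothetical endpoint
    headers=[
        http_pb2.HttpHeader(key="content-type", value="application/json"),
        http_pb2.HttpHeader(key="accept-encoding", value="gzip"),
    ],
    body=b'{"name": "test"}',
)
```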
diff --git a/Lib/site-packages/google/rpc/http_pb2.py b/Lib/site-packages/google/rpc/http_pb2.py
new file mode 100644
index 0000000..71be9b7
--- /dev/null
+++ b/Lib/site-packages/google/rpc/http_pb2.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/rpc/http.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x15google/rpc/http.proto\x12\ngoogle.rpc"a\n\x0bHttpRequest\x12\x0e\n\x06method\x18\x01 \x01(\t\x12\x0b\n\x03uri\x18\x02 \x01(\t\x12\'\n\x07headers\x18\x03 \x03(\x0b\x32\x16.google.rpc.HttpHeader\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c"e\n\x0cHttpResponse\x12\x0e\n\x06status\x18\x01 \x01(\x05\x12\x0e\n\x06reason\x18\x02 \x01(\t\x12\'\n\x07headers\x18\x03 \x03(\x0b\x32\x16.google.rpc.HttpHeader\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c"(\n\nHttpHeader\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\tBX\n\x0e\x63om.google.rpcB\tHttpProtoP\x01Z3google.golang.org/genproto/googleapis/rpc/http;http\xa2\x02\x03RPCb\x06proto3'
+)
+
+
+_HTTPREQUEST = DESCRIPTOR.message_types_by_name["HttpRequest"]
+_HTTPRESPONSE = DESCRIPTOR.message_types_by_name["HttpResponse"]
+_HTTPHEADER = DESCRIPTOR.message_types_by_name["HttpHeader"]
+HttpRequest = _reflection.GeneratedProtocolMessageType(
+ "HttpRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _HTTPREQUEST,
+ "__module__": "google.rpc.http_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.HttpRequest)
+ },
+)
+_sym_db.RegisterMessage(HttpRequest)
+
+HttpResponse = _reflection.GeneratedProtocolMessageType(
+ "HttpResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _HTTPRESPONSE,
+ "__module__": "google.rpc.http_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.HttpResponse)
+ },
+)
+_sym_db.RegisterMessage(HttpResponse)
+
+HttpHeader = _reflection.GeneratedProtocolMessageType(
+ "HttpHeader",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _HTTPHEADER,
+ "__module__": "google.rpc.http_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.HttpHeader)
+ },
+)
+_sym_db.RegisterMessage(HttpHeader)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.rpcB\tHttpProtoP\001Z3google.golang.org/genproto/googleapis/rpc/http;http\242\002\003RPC"
+ _HTTPREQUEST._serialized_start = 37
+ _HTTPREQUEST._serialized_end = 134
+ _HTTPRESPONSE._serialized_start = 136
+ _HTTPRESPONSE._serialized_end = 237
+ _HTTPHEADER._serialized_start = 239
+ _HTTPHEADER._serialized_end = 279
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/rpc/status.proto b/Lib/site-packages/google/rpc/status.proto
new file mode 100644
index 0000000..923e169
--- /dev/null
+++ b/Lib/site-packages/google/rpc/status.proto
@@ -0,0 +1,49 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.rpc;
+
+import "google/protobuf/any.proto";
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/rpc/status;status";
+option java_multiple_files = true;
+option java_outer_classname = "StatusProto";
+option java_package = "com.google.rpc";
+option objc_class_prefix = "RPC";
+
+// The `Status` type defines a logical error model that is suitable for
+// different programming environments, including REST APIs and RPC APIs. It is
+// used by [gRPC](https://github.com/grpc). Each `Status` message contains
+// three pieces of data: error code, error message, and error details.
+//
+// You can find out more about this error model and how to work with it in the
+// [API Design Guide](https://cloud.google.com/apis/design/errors).
+message Status {
+ // The status code, which should be an enum value of
+ // [google.rpc.Code][google.rpc.Code].
+ int32 code = 1;
+
+ // A developer-facing error message, which should be in English. Any
+ // user-facing error message should be localized and sent in the
+ // [google.rpc.Status.details][google.rpc.Status.details] field, or localized
+ // by the client.
+ string message = 2;
+
+ // A list of messages that carry the error details. There is a common set of
+ // message types for APIs to use.
+ repeated google.protobuf.Any details = 3;
+}
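The `details` field is where the error-detail payloads defined earlier in this diff travel, packed as `google.protobuf.Any`. A hedged sketch of that round trip, using the generated modules added in this diff (the code value 8 is `RESOURCE_EXHAUSTED` in `google.rpc.Code`):

```python
# Hedged sketch: packing a QuotaFailure into Status.details and unpacking it.
from google.rpc import error_details_pb2, status_pb2

status = status_pb2.Status(code=8, message="Quota exceeded")
failure = error_details_pb2.QuotaFailure()
violation = failure.violations.add()
violation.subject = "project:123"
violation.description = "Daily limit for read operations exceeded"
status.details.add().Pack(failure)

# Consumer side: recognize and unpack known detail types.
for any_msg in status.details:
    if any_msg.Is(error_details_pb2.QuotaFailure.DESCRIPTOR):
        unpacked = error_details_pb2.QuotaFailure()
        any_msg.Unpack(unpacked)
```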
diff --git a/Lib/site-packages/google/rpc/status_pb2.py b/Lib/site-packages/google/rpc/status_pb2.py
new file mode 100644
index 0000000..23f1f36
--- /dev/null
+++ b/Lib/site-packages/google/rpc/status_pb2.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/rpc/status.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x17google/rpc/status.proto\x12\ngoogle.rpc\x1a\x19google/protobuf/any.proto"N\n\x06Status\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x0f\n\x07message\x18\x02 \x01(\t\x12%\n\x07\x64\x65tails\x18\x03 \x03(\x0b\x32\x14.google.protobuf.AnyBa\n\x0e\x63om.google.rpcB\x0bStatusProtoP\x01Z7google.golang.org/genproto/googleapis/rpc/status;status\xf8\x01\x01\xa2\x02\x03RPCb\x06proto3'
+)
+
+
+_STATUS = DESCRIPTOR.message_types_by_name["Status"]
+Status = _reflection.GeneratedProtocolMessageType(
+ "Status",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _STATUS,
+ "__module__": "google.rpc.status_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.Status)
+ },
+)
+_sym_db.RegisterMessage(Status)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\016com.google.rpcB\013StatusProtoP\001Z7google.golang.org/genproto/googleapis/rpc/status;status\370\001\001\242\002\003RPC"
+ _STATUS._serialized_start = 66
+ _STATUS._serialized_end = 144
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/type/calendar_period.proto b/Lib/site-packages/google/type/calendar_period.proto
new file mode 100644
index 0000000..82f5690
--- /dev/null
+++ b/Lib/site-packages/google/type/calendar_period.proto
@@ -0,0 +1,56 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+option go_package = "google.golang.org/genproto/googleapis/type/calendarperiod;calendarperiod";
+option java_multiple_files = true;
+option java_outer_classname = "CalendarPeriodProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// A `CalendarPeriod` represents the abstract concept of a time period that has
+// a canonical start. Grammatically, "the start of the current
+// `CalendarPeriod`." All calendar times begin at midnight UTC.
+enum CalendarPeriod {
+ // Undefined period, raises an error.
+ CALENDAR_PERIOD_UNSPECIFIED = 0;
+
+ // A day.
+ DAY = 1;
+
+ // A week. Weeks begin on Monday, following
+ // [ISO 8601](https://en.wikipedia.org/wiki/ISO_week_date).
+ WEEK = 2;
+
+ // A fortnight. The first calendar fortnight of the year begins at the start
+ // of week 1 according to
+ // [ISO 8601](https://en.wikipedia.org/wiki/ISO_week_date).
+ FORTNIGHT = 3;
+
+ // A month.
+ MONTH = 4;
+
+ // A quarter. Quarters start on dates 1-Jan, 1-Apr, 1-Jul, and 1-Oct of each
+ // year.
+ QUARTER = 5;
+
+ // A half-year. Half-years start on dates 1-Jan and 1-Jul.
+ HALF = 6;
+
+ // A year.
+ YEAR = 7;
+}
diff --git a/Lib/site-packages/google/type/calendar_period_pb2.py b/Lib/site-packages/google/type/calendar_period_pb2.py
new file mode 100644
index 0000000..c3861cc
--- /dev/null
+++ b/Lib/site-packages/google/type/calendar_period_pb2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/calendar_period.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b"\n!google/type/calendar_period.proto\x12\x0bgoogle.type*\x7f\n\x0e\x43\x61lendarPeriod\x12\x1f\n\x1b\x43\x41LENDAR_PERIOD_UNSPECIFIED\x10\x00\x12\x07\n\x03\x44\x41Y\x10\x01\x12\x08\n\x04WEEK\x10\x02\x12\r\n\tFORTNIGHT\x10\x03\x12\t\n\x05MONTH\x10\x04\x12\x0b\n\x07QUARTER\x10\x05\x12\x08\n\x04HALF\x10\x06\x12\x08\n\x04YEAR\x10\x07\x42x\n\x0f\x63om.google.typeB\x13\x43\x61lendarPeriodProtoP\x01ZHgoogle.golang.org/genproto/googleapis/type/calendarperiod;calendarperiod\xa2\x02\x03GTPb\x06proto3"
+)
+
+_CALENDARPERIOD = DESCRIPTOR.enum_types_by_name["CalendarPeriod"]
+CalendarPeriod = enum_type_wrapper.EnumTypeWrapper(_CALENDARPERIOD)
+CALENDAR_PERIOD_UNSPECIFIED = 0
+DAY = 1
+WEEK = 2
+FORTNIGHT = 3
+MONTH = 4
+QUARTER = 5
+HALF = 6
+YEAR = 7
+
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\023CalendarPeriodProtoP\001ZHgoogle.golang.org/genproto/googleapis/type/calendarperiod;calendarperiod\242\002\003GTP"
+ _CALENDARPERIOD._serialized_start = 50
+ _CALENDARPERIOD._serialized_end = 177
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/type/color.proto b/Lib/site-packages/google/type/color.proto
new file mode 100644
index 0000000..5dc85a6
--- /dev/null
+++ b/Lib/site-packages/google/type/color.proto
@@ -0,0 +1,174 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+import "google/protobuf/wrappers.proto";
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/type/color;color";
+option java_multiple_files = true;
+option java_outer_classname = "ColorProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// Represents a color in the RGBA color space. This representation is designed
+// for simplicity of conversion to/from color representations in various
+// languages over compactness. For example, the fields of this representation
+// can be trivially provided to the constructor of `java.awt.Color` in Java; it
+// can also be trivially provided to UIColor's `+colorWithRed:green:blue:alpha:`
+// method in iOS; and, with just a little work, it can be easily formatted into
+// a CSS `rgba()` string in JavaScript.
+//
+// This reference page doesn't carry information about the absolute color
+// space
+// that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB,
+// DCI-P3, BT.2020, etc.). By default, applications should assume the sRGB color
+// space.
+//
+// When color equality needs to be decided, implementations, unless
+// documented otherwise, treat two colors as equal if all their red,
+// green, blue, and alpha values each differ by at most 1e-5.
+//
+// Example (Java):
+//
+// import com.google.type.Color;
+//
+// // ...
+// public static java.awt.Color fromProto(Color protocolor) {
+// float alpha = protocolor.hasAlpha()
+// ? protocolor.getAlpha().getValue()
+// : 1.0f;
+//
+// return new java.awt.Color(
+// protocolor.getRed(),
+// protocolor.getGreen(),
+// protocolor.getBlue(),
+// alpha);
+// }
+//
+// public static Color toProto(java.awt.Color color) {
+// float red = (float) color.getRed();
+// float green = (float) color.getGreen();
+// float blue = (float) color.getBlue();
+// float denominator = 255.0f;
+// Color.Builder resultBuilder =
+// Color
+// .newBuilder()
+// .setRed(red / denominator)
+// .setGreen(green / denominator)
+// .setBlue(blue / denominator);
+// int alpha = color.getAlpha();
+// if (alpha != 255) {
+// resultBuilder.setAlpha(
+// FloatValue
+// .newBuilder()
+// .setValue(((float) alpha) / denominator)
+// .build());
+// }
+// return resultBuilder.build();
+// }
+// // ...
+//
+// Example (iOS / Obj-C):
+//
+// // ...
+// static UIColor* fromProto(Color* protocolor) {
+// float red = [protocolor red];
+// float green = [protocolor green];
+// float blue = [protocolor blue];
+// FloatValue* alpha_wrapper = [protocolor alpha];
+// float alpha = 1.0;
+// if (alpha_wrapper != nil) {
+// alpha = [alpha_wrapper value];
+// }
+// return [UIColor colorWithRed:red green:green blue:blue alpha:alpha];
+// }
+//
+// static Color* toProto(UIColor* color) {
+// CGFloat red, green, blue, alpha;
+// if (![color getRed:&red green:&green blue:&blue alpha:&alpha]) {
+// return nil;
+// }
+// Color* result = [[Color alloc] init];
+// [result setRed:red];
+// [result setGreen:green];
+// [result setBlue:blue];
+// if (alpha <= 0.9999) {
+// [result setAlpha:floatWrapperWithValue(alpha)];
+// }
+// [result autorelease];
+// return result;
+// }
+// // ...
+//
+// Example (JavaScript):
+//
+// // ...
+//
+// var protoToCssColor = function(rgb_color) {
+// var redFrac = rgb_color.red || 0.0;
+// var greenFrac = rgb_color.green || 0.0;
+// var blueFrac = rgb_color.blue || 0.0;
+// var red = Math.floor(redFrac * 255);
+// var green = Math.floor(greenFrac * 255);
+// var blue = Math.floor(blueFrac * 255);
+//
+// if (!('alpha' in rgb_color)) {
+// return rgbToCssColor(red, green, blue);
+// }
+//
+// var alphaFrac = rgb_color.alpha.value || 0.0;
+// var rgbParams = [red, green, blue].join(',');
+// return ['rgba(', rgbParams, ',', alphaFrac, ')'].join('');
+// };
+//
+// var rgbToCssColor = function(red, green, blue) {
+// var rgbNumber = new Number((red << 16) | (green << 8) | blue);
+// var hexString = rgbNumber.toString(16);
+// var missingZeros = 6 - hexString.length;
+// var resultBuilder = ['#'];
+// for (var i = 0; i < missingZeros; i++) {
+// resultBuilder.push('0');
+// }
+// resultBuilder.push(hexString);
+// return resultBuilder.join('');
+// };
+//
+// // ...
+message Color {
+ // The amount of red in the color as a value in the interval [0, 1].
+ float red = 1;
+
+ // The amount of green in the color as a value in the interval [0, 1].
+ float green = 2;
+
+ // The amount of blue in the color as a value in the interval [0, 1].
+ float blue = 3;
+
+ // The fraction of this color that should be applied to the pixel. That is,
+ // the final pixel color is defined by the equation:
+ //
+ // `pixel color = alpha * (this color) + (1.0 - alpha) * (background color)`
+ //
+ // This means that a value of 1.0 corresponds to a solid color, whereas
+ // a value of 0.0 corresponds to a completely transparent color. This
+ // uses a wrapper message rather than a simple float scalar so that it is
+ // possible to distinguish between a default value and the value being unset.
+ // If omitted, this color object is rendered as a solid color
+ // (as if the alpha value had been explicitly given a value of 1.0).
+ google.protobuf.FloatValue alpha = 4;
+}
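+
+// A minimal Python sketch of the alpha-wrapper distinction described above,
+// assuming the generated google.type.color_pb2 module is importable:
+//
+//     from google.protobuf import wrappers_pb2
+//     from google.type import color_pb2
+//
+//     # No alpha set: rendered as a solid color (implicit alpha = 1.0).
+//     solid = color_pb2.Color(red=0.5, green=0.25, blue=0.0)
+//     assert not solid.HasField("alpha")
+//
+//     # Explicit alpha = 0.0 is distinguishable from "unset" because the
+//     # field is a FloatValue wrapper message, not a bare float.
+//     clear = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=0.0))
+//     assert clear.HasField("alpha") and clear.alpha.value == 0.0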
diff --git a/Lib/site-packages/google/type/color_pb2.py b/Lib/site-packages/google/type/color_pb2.py
new file mode 100644
index 0000000..37fffc0
--- /dev/null
+++ b/Lib/site-packages/google/type/color_pb2.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/color.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x17google/type/color.proto\x12\x0bgoogle.type\x1a\x1egoogle/protobuf/wrappers.proto"]\n\x05\x43olor\x12\x0b\n\x03red\x18\x01 \x01(\x02\x12\r\n\x05green\x18\x02 \x01(\x02\x12\x0c\n\x04\x62lue\x18\x03 \x01(\x02\x12*\n\x05\x61lpha\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.FloatValueB`\n\x0f\x63om.google.typeB\nColorProtoP\x01Z6google.golang.org/genproto/googleapis/type/color;color\xf8\x01\x01\xa2\x02\x03GTPb\x06proto3'
+)
+
+
+_COLOR = DESCRIPTOR.message_types_by_name["Color"]
+Color = _reflection.GeneratedProtocolMessageType(
+ "Color",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _COLOR,
+ "__module__": "google.type.color_pb2"
+ # @@protoc_insertion_point(class_scope:google.type.Color)
+ },
+)
+_sym_db.RegisterMessage(Color)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\nColorProtoP\001Z6google.golang.org/genproto/googleapis/type/color;color\370\001\001\242\002\003GTP"
+ _COLOR._serialized_start = 72
+ _COLOR._serialized_end = 165
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/type/date.proto b/Lib/site-packages/google/type/date.proto
new file mode 100644
index 0000000..e4e730e
--- /dev/null
+++ b/Lib/site-packages/google/type/date.proto
@@ -0,0 +1,52 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/type/date;date";
+option java_multiple_files = true;
+option java_outer_classname = "DateProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// Represents a whole or partial calendar date, such as a birthday. The time of
+// day and time zone are either specified elsewhere or are insignificant. The
+// date is relative to the Gregorian Calendar. This can represent one of the
+// following:
+//
+// * A full date, with non-zero year, month, and day values
+// * A month and day value, with a zero year, such as an anniversary
+// * A year on its own, with zero month and day values
+// * A year and month value, with a zero day, such as a credit card expiration
+// date
+//
+// Related types are [google.type.TimeOfDay][google.type.TimeOfDay] and
+// `google.protobuf.Timestamp`.
+message Date {
+ // Year of the date. Must be from 1 to 9999, or 0 to specify a date without
+ // a year.
+ int32 year = 1;
+
+ // Month of a year. Must be from 1 to 12, or 0 to specify a year without a
+ // month and day.
+ int32 month = 2;
+
+ // Day of a month. Must be from 1 to 31 and valid for the year and month, or 0
+ // to specify a year by itself or a year and month where the day isn't
+ // significant.
+ int32 day = 3;
+}
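+
+// A short illustrative Python sketch of the partial-date forms above, assuming
+// the generated google.type.date_pb2 module is importable (unset int32 fields
+// default to 0):
+//
+//     from google.type import date_pb2
+//
+//     full = date_pb2.Date(year=1976, month=4, day=1)  # a full date
+//     card_expiry = date_pb2.Date(year=2031, month=12)  # day == 0
+//     anniversary = date_pb2.Date(month=4, day=1)       # year == 0
+//     year_only = date_pb2.Date(year=1976)              # month == day == 0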
diff --git a/Lib/site-packages/google/type/date_pb2.py b/Lib/site-packages/google/type/date_pb2.py
new file mode 100644
index 0000000..8c1da5f
--- /dev/null
+++ b/Lib/site-packages/google/type/date_pb2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/date.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x16google/type/date.proto\x12\x0bgoogle.type"0\n\x04\x44\x61te\x12\x0c\n\x04year\x18\x01 \x01(\x05\x12\r\n\x05month\x18\x02 \x01(\x05\x12\x0b\n\x03\x64\x61y\x18\x03 \x01(\x05\x42]\n\x0f\x63om.google.typeB\tDateProtoP\x01Z4google.golang.org/genproto/googleapis/type/date;date\xf8\x01\x01\xa2\x02\x03GTPb\x06proto3'
+)
+
+
+_DATE = DESCRIPTOR.message_types_by_name["Date"]
+Date = _reflection.GeneratedProtocolMessageType(
+ "Date",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DATE,
+ "__module__": "google.type.date_pb2"
+ # @@protoc_insertion_point(class_scope:google.type.Date)
+ },
+)
+_sym_db.RegisterMessage(Date)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\tDateProtoP\001Z4google.golang.org/genproto/googleapis/type/date;date\370\001\001\242\002\003GTP"
+ _DATE._serialized_start = 39
+ _DATE._serialized_end = 87
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/type/datetime.proto b/Lib/site-packages/google/type/datetime.proto
new file mode 100644
index 0000000..cfed85d
--- /dev/null
+++ b/Lib/site-packages/google/type/datetime.proto
@@ -0,0 +1,104 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+import "google/protobuf/duration.proto";
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/type/datetime;datetime";
+option java_multiple_files = true;
+option java_outer_classname = "DateTimeProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// Represents civil time (or occasionally physical time).
+//
+// This type can represent a civil time in one of a few possible ways:
+//
+// * When utc_offset is set and time_zone is unset: a civil time on a calendar
+// day with a particular offset from UTC.
+// * When time_zone is set and utc_offset is unset: a civil time on a calendar
+// day in a particular time zone.
+// * When neither time_zone nor utc_offset is set: a civil time on a calendar
+// day in local time.
+//
+// The date is relative to the Proleptic Gregorian Calendar.
+//
+// If year is 0, the DateTime is considered not to have a specific year. month
+// and day must have valid, non-zero values.
+//
+// This type may also be used to represent a physical time if all the date and
+// time fields are set and either case of the `time_offset` oneof is set.
+// Consider using `Timestamp` message for physical time instead. If your use
+// case also would like to store the user's timezone, that can be done in
+// another field.
+//
+// This type is more flexible than some applications may want. Make sure to
+// document and validate your application's limitations.
+message DateTime {
+ // Optional. Year of date. Must be from 1 to 9999, or 0 if specifying a
+ // datetime without a year.
+ int32 year = 1;
+
+ // Required. Month of year. Must be from 1 to 12.
+ int32 month = 2;
+
+ // Required. Day of month. Must be from 1 to 31 and valid for the year and
+ // month.
+ int32 day = 3;
+
+ // Required. Hours of day in 24 hour format. Should be from 0 to 23. An API
+ // may choose to allow the value "24:00:00" for scenarios like business
+ // closing time.
+ int32 hours = 4;
+
+ // Required. Minutes of hour of day. Must be from 0 to 59.
+ int32 minutes = 5;
+
+ // Required. Seconds of minutes of the time. Must normally be from 0 to 59. An
+ // API may allow the value 60 if it allows leap-seconds.
+ int32 seconds = 6;
+
+ // Required. Fractions of seconds in nanoseconds. Must be from 0 to
+ // 999,999,999.
+ int32 nanos = 7;
+
+ // Optional. Specifies either the UTC offset or the time zone of the DateTime.
+ // Choose carefully between them, considering that time zone data may change
+ // in the future (for example, a country modifies their DST start/end dates,
+ // and future DateTimes in the affected range had already been stored).
+ // If omitted, the DateTime is considered to be in local time.
+ oneof time_offset {
+ // UTC offset. Must be whole seconds, between -18 hours and +18 hours.
+ // For example, a UTC offset of -4:00 would be represented as
+ // { seconds: -14400 }.
+ google.protobuf.Duration utc_offset = 8;
+
+ // Time zone.
+ TimeZone time_zone = 9;
+ }
+}
+
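+// An illustrative Python sketch of the time_offset oneof above, assuming the
+// generated google.type.datetime_pb2 module is importable:
+//
+//     from google.protobuf import duration_pb2
+//     from google.type import datetime_pb2
+//
+//     # A civil time with an explicit UTC offset of -4:00 (-14400 seconds).
+//     dt = datetime_pb2.DateTime(
+//         year=2021, month=7, day=1, hours=9, minutes=30,
+//         utc_offset=duration_pb2.Duration(seconds=-14400))
+//     assert dt.WhichOneof("time_offset") == "utc_offset"
+//
+//     # Setting the other member of the oneof clears utc_offset.
+//     dt.time_zone.id = "America/New_York"
+//     assert dt.WhichOneof("time_offset") == "time_zone"
+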
+// Represents a time zone from the
+// [IANA Time Zone Database](https://www.iana.org/time-zones).
+message TimeZone {
+ // IANA Time Zone Database time zone, e.g. "America/New_York".
+ string id = 1;
+
+ // Optional. IANA Time Zone Database version number, e.g. "2019a".
+ string version = 2;
+}
diff --git a/Lib/site-packages/google/type/datetime_pb2.py b/Lib/site-packages/google/type/datetime_pb2.py
new file mode 100644
index 0000000..43829b0
--- /dev/null
+++ b/Lib/site-packages/google/type/datetime_pb2.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/datetime.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+    b'\n\x1agoogle/type/datetime.proto\x12\x0bgoogle.type\x1a\x1egoogle/protobuf/duration.proto"\xe0\x01\n\x08\x44\x61teTime\x12\x0c\n\x04year\x18\x01 \x01(\x05\x12\r\n\x05month\x18\x02 \x01(\x05\x12\x0b\n\x03\x64\x61y\x18\x03 \x01(\x05\x12\r\n\x05hours\x18\x04 \x01(\x05\x12\x0f\n\x07minutes\x18\x05 \x01(\x05\x12\x0f\n\x07seconds\x18\x06 \x01(\x05\x12\r\n\x05nanos\x18\x07 \x01(\x05\x12/\n\nutc_offset\x18\x08 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12*\n\ttime_zone\x18\t \x01(\x0b\x32\x15.google.type.TimeZoneH\x00\x42\r\n\x0btime_offset"\'\n\x08TimeZone\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\tBi\n\x0f\x63om.google.typeB\rDateTimeProtoP\x01Z<google.golang.org/genproto/googleapis/type/datetime;datetime\xf8\x01\x01\xa2\x02\x03GTPb\x06proto3'
+)
+
+
+_DATETIME = DESCRIPTOR.message_types_by_name["DateTime"]
+_TIMEZONE = DESCRIPTOR.message_types_by_name["TimeZone"]
+DateTime = _reflection.GeneratedProtocolMessageType(
+    "DateTime",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _DATETIME,
+        "__module__": "google.type.datetime_pb2"
+        # @@protoc_insertion_point(class_scope:google.type.DateTime)
+    },
+)
+_sym_db.RegisterMessage(DateTime)
+
+TimeZone = _reflection.GeneratedProtocolMessageType(
+    "TimeZone",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _TIMEZONE,
+        "__module__": "google.type.datetime_pb2"
+        # @@protoc_insertion_point(class_scope:google.type.TimeZone)
+    },
+)
+_sym_db.RegisterMessage(TimeZone)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+    DESCRIPTOR._options = None
+    DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\rDateTimeProtoP\001Z<google.golang.org/genproto/googleapis/type/datetime;datetime\370\001\001\242\002\003GTP"
+    _DATETIME._serialized_start = 76
+    _DATETIME._serialized_end = 300
+    _TIMEZONE._serialized_start = 302
+    _TIMEZONE._serialized_end = 341
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/type/dayofweek_pb2.py b/Lib/site-packages/google/type/dayofweek_pb2.py
new file mode 100644
--- /dev/null
+++ b/Lib/site-packages/google/type/dayofweek_pb2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/dayofweek.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+    b"\n\x1bgoogle/type/dayofweek.proto\x12\x0bgoogle.type*\x84\x01\n\tDayOfWeek\x12\x1b\n\x17\x44\x41Y_OF_WEEK_UNSPECIFIED\x10\x00\x12\n\n\x06MONDAY\x10\x01\x12\x0b\n\x07TUESDAY\x10\x02\x12\r\n\tWEDNESDAY\x10\x03\x12\x0c\n\x08THURSDAY\x10\x04\x12\n\n\x06\x46RIDAY\x10\x05\x12\x0c\n\x08SATURDAY\x10\x06\x12\n\n\x06SUNDAY\x10\x07\x42i\n\x0f\x63om.google.typeB\x0e\x44\x61yOfWeekProtoP\x01Z>google.golang.org/genproto/googleapis/type/dayofweek;dayofweek\xa2\x02\x03GTPb\x06proto3"
+)
+
+_DAYOFWEEK = DESCRIPTOR.enum_types_by_name["DayOfWeek"]
+DayOfWeek = enum_type_wrapper.EnumTypeWrapper(_DAYOFWEEK)
+DAY_OF_WEEK_UNSPECIFIED = 0
+MONDAY = 1
+TUESDAY = 2
+WEDNESDAY = 3
+THURSDAY = 4
+FRIDAY = 5
+SATURDAY = 6
+SUNDAY = 7
+
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\016DayOfWeekProtoP\001Z>google.golang.org/genproto/googleapis/type/dayofweek;dayofweek\242\002\003GTP"
+ _DAYOFWEEK._serialized_start = 45
+ _DAYOFWEEK._serialized_end = 177
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/type/decimal.proto b/Lib/site-packages/google/type/decimal.proto
new file mode 100644
index 0000000..beb18a5
--- /dev/null
+++ b/Lib/site-packages/google/type/decimal.proto
@@ -0,0 +1,95 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/type/decimal;decimal";
+option java_multiple_files = true;
+option java_outer_classname = "DecimalProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// A representation of a decimal value, such as 2.5. Clients may convert values
+// into language-native decimal formats, such as Java's [BigDecimal][] or
+// Python's [decimal.Decimal][].
+//
+// [BigDecimal]:
+// https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/math/BigDecimal.html
+// [decimal.Decimal]: https://docs.python.org/3/library/decimal.html
+message Decimal {
+ // The decimal value, as a string.
+ //
+ // The string representation consists of an optional sign, `+` (`U+002B`)
+ // or `-` (`U+002D`), followed by a sequence of zero or more decimal digits
+ // ("the integer"), optionally followed by a fraction, optionally followed
+ // by an exponent.
+ //
+ // The fraction consists of a decimal point followed by zero or more decimal
+ // digits. The string must contain at least one digit in either the integer
+ // or the fraction. The number formed by the sign, the integer and the
+ // fraction is referred to as the significand.
+ //
+ // The exponent consists of the character `e` (`U+0065`) or `E` (`U+0045`)
+ // followed by one or more decimal digits.
+ //
+ // Services **should** normalize decimal values before storing them by:
+ //
+ // - Removing an explicitly-provided `+` sign (`+2.5` -> `2.5`).
+ // - Replacing a zero-length integer value with `0` (`.5` -> `0.5`).
+ // - Coercing the exponent character to lower-case (`2.5E8` -> `2.5e8`).
+ // - Removing an explicitly-provided zero exponent (`2.5e0` -> `2.5`).
+ //
+// Services **may** perform additional normalization based on their own needs
+ // and the internal decimal implementation selected, such as shifting the
+ // decimal point and exponent value together (example: `2.5e-1` <-> `0.25`).
+ // Additionally, services **may** preserve trailing zeroes in the fraction
+ // to indicate increased precision, but are not required to do so.
+ //
+ // Note that only the `.` character is supported to divide the integer
+ // and the fraction; `,` **should not** be supported regardless of locale.
+ // Additionally, thousand separators **should not** be supported. If a
+ // service does support them, values **must** be normalized.
+ //
+// The EBNF grammar is:
+ //
+ // DecimalString =
+ // [Sign] Significand [Exponent];
+ //
+ // Sign = '+' | '-';
+ //
+ // Significand =
+ // Digits ['.'] [Digits] | [Digits] '.' Digits;
+ //
+ // Exponent = ('e' | 'E') [Sign] Digits;
+ //
+ // Digits = { '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' };
+ //
+ // Services **should** clearly document the range of supported values, the
+ // maximum supported precision (total number of digits), and, if applicable,
+ // the scale (number of digits after the decimal point), as well as how it
+ // behaves when receiving out-of-bounds values.
+ //
+ // Services **may** choose to accept values passed as input even when the
+ // value has a higher precision or scale than the service supports, and
+ // **should** round the value to fit the supported scale. Alternatively, the
+ // service **may** error with `400 Bad Request` (`INVALID_ARGUMENT` in gRPC)
+ // if precision would be lost.
+ //
+ // Services **should** error with `400 Bad Request` (`INVALID_ARGUMENT` in
+ // gRPC) if the service receives a value outside of the supported range.
+ string value = 1;
+}
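+
+// A minimal Python sketch of the client-side conversion mentioned above,
+// assuming the generated google.type.decimal_pb2 module is importable:
+//
+//     import decimal
+//     from google.type import decimal_pb2
+//
+//     msg = decimal_pb2.Decimal(value="2.5e8")
+//
+//     # Convert to a language-native decimal type; comparison is numeric.
+//     native = decimal.Decimal(msg.value)
+//     assert native == decimal.Decimal("250000000")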
diff --git a/Lib/site-packages/google/type/decimal_pb2.py b/Lib/site-packages/google/type/decimal_pb2.py
new file mode 100644
index 0000000..8f14e33
--- /dev/null
+++ b/Lib/site-packages/google/type/decimal_pb2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/decimal.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x19google/type/decimal.proto\x12\x0bgoogle.type"\x18\n\x07\x44\x65\x63imal\x12\r\n\x05value\x18\x01 \x01(\tBf\n\x0f\x63om.google.typeB\x0c\x44\x65\x63imalProtoP\x01Z:google.golang.org/genproto/googleapis/type/decimal;decimal\xf8\x01\x01\xa2\x02\x03GTPb\x06proto3'
+)
+
+
+_DECIMAL = DESCRIPTOR.message_types_by_name["Decimal"]
+Decimal = _reflection.GeneratedProtocolMessageType(
+ "Decimal",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DECIMAL,
+ "__module__": "google.type.decimal_pb2"
+ # @@protoc_insertion_point(class_scope:google.type.Decimal)
+ },
+)
+_sym_db.RegisterMessage(Decimal)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\014DecimalProtoP\001Z:google.golang.org/genproto/googleapis/type/decimal;decimal\370\001\001\242\002\003GTP"
+ _DECIMAL._serialized_start = 42
+ _DECIMAL._serialized_end = 66
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/type/expr.proto b/Lib/site-packages/google/type/expr.proto
new file mode 100644
index 0000000..af0778c
--- /dev/null
+++ b/Lib/site-packages/google/type/expr.proto
@@ -0,0 +1,73 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+option go_package = "google.golang.org/genproto/googleapis/type/expr;expr";
+option java_multiple_files = true;
+option java_outer_classname = "ExprProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// Represents a textual expression in the Common Expression Language (CEL)
+// syntax. CEL is a C-like expression language. The syntax and semantics of CEL
+// are documented at https://github.com/google/cel-spec.
+//
+// Example (Comparison):
+//
+// title: "Summary size limit"
+// description: "Determines if a summary is less than 100 chars"
+// expression: "document.summary.size() < 100"
+//
+// Example (Equality):
+//
+// title: "Requestor is owner"
+// description: "Determines if requestor is the document owner"
+// expression: "document.owner == request.auth.claims.email"
+//
+// Example (Logic):
+//
+// title: "Public documents"
+// description: "Determine whether the document should be publicly visible"
+// expression: "document.type != 'private' && document.type != 'internal'"
+//
+// Example (Data Manipulation):
+//
+// title: "Notification string"
+// description: "Create a notification string with a timestamp."
+// expression: "'New message received at ' + string(document.create_time)"
+//
+// The exact variables and functions that may be referenced within an expression
+// are determined by the service that evaluates it. See the service
+// documentation for additional information.
+message Expr {
+ // Textual representation of an expression in Common Expression Language
+ // syntax.
+ string expression = 1;
+
+ // Optional. Title for the expression, i.e. a short string describing
+// its purpose. This can be used, e.g., in UIs which allow the expression
+// to be entered.
+ string title = 2;
+
+ // Optional. Description of the expression. This is a longer text which
+ // describes the expression, e.g. when hovered over it in a UI.
+ string description = 3;
+
+ // Optional. String indicating the location of the expression for error
+ // reporting, e.g. a file name and a position in the file.
+ string location = 4;
+}
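+
+// A short illustrative Python sketch constructing the first example above,
+// assuming the generated google.type.expr_pb2 module is importable:
+//
+//     from google.type import expr_pb2
+//
+//     expr = expr_pb2.Expr(
+//         title="Summary size limit",
+//         description="Determines if a summary is less than 100 chars",
+//         expression="document.summary.size() < 100")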
diff --git a/Lib/site-packages/google/type/expr_pb2.py b/Lib/site-packages/google/type/expr_pb2.py
new file mode 100644
index 0000000..42f0069
--- /dev/null
+++ b/Lib/site-packages/google/type/expr_pb2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/expr.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x16google/type/expr.proto\x12\x0bgoogle.type"P\n\x04\x45xpr\x12\x12\n\nexpression\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x10\n\x08location\x18\x04 \x01(\tBZ\n\x0f\x63om.google.typeB\tExprProtoP\x01Z4google.golang.org/genproto/googleapis/type/expr;expr\xa2\x02\x03GTPb\x06proto3'
+)
+
+
+_EXPR = DESCRIPTOR.message_types_by_name["Expr"]
+Expr = _reflection.GeneratedProtocolMessageType(
+ "Expr",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _EXPR,
+ "__module__": "google.type.expr_pb2"
+ # @@protoc_insertion_point(class_scope:google.type.Expr)
+ },
+)
+_sym_db.RegisterMessage(Expr)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\tExprProtoP\001Z4google.golang.org/genproto/googleapis/type/expr;expr\242\002\003GTP"
+ _EXPR._serialized_start = 39
+ _EXPR._serialized_end = 119
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/type/fraction.proto b/Lib/site-packages/google/type/fraction.proto
new file mode 100644
index 0000000..6c5ae6e
--- /dev/null
+++ b/Lib/site-packages/google/type/fraction.proto
@@ -0,0 +1,33 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+option go_package = "google.golang.org/genproto/googleapis/type/fraction;fraction";
+option java_multiple_files = true;
+option java_outer_classname = "FractionProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// Represents a fraction in terms of a numerator divided by a denominator.
+message Fraction {
+ // The numerator in the fraction, e.g. 2 in 2/3.
+ int64 numerator = 1;
+
+ // The value by which the numerator is divided, e.g. 3 in 2/3. Must be
+ // positive.
+ int64 denominator = 2;
+}
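+
+// A minimal Python sketch, assuming the generated google.type.fraction_pb2
+// module is importable:
+//
+//     import fractions
+//     from google.type import fraction_pb2
+//
+//     msg = fraction_pb2.Fraction(numerator=2, denominator=3)
+//     native = fractions.Fraction(msg.numerator, msg.denominator)  # 2/3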
diff --git a/Lib/site-packages/google/type/fraction_pb2.py b/Lib/site-packages/google/type/fraction_pb2.py
new file mode 100644
index 0000000..15c0273
--- /dev/null
+++ b/Lib/site-packages/google/type/fraction_pb2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/fraction.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+    b'\n\x1agoogle/type/fraction.proto\x12\x0bgoogle.type"2\n\x08\x46raction\x12\x11\n\tnumerator\x18\x01 \x01(\x03\x12\x13\n\x0b\x64\x65nominator\x18\x02 \x01(\x03\x42\x66\n\x0f\x63om.google.typeB\rFractionProtoP\x01Z<google.golang.org/genproto/googleapis/type/fraction;fraction\xa2\x02\x03GTPb\x06proto3'
+)
+
+
+_FRACTION = DESCRIPTOR.message_types_by_name["Fraction"]
+Fraction = _reflection.GeneratedProtocolMessageType(
+    "Fraction",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _FRACTION,
+        "__module__": "google.type.fraction_pb2"
+        # @@protoc_insertion_point(class_scope:google.type.Fraction)
+    },
+)
+_sym_db.RegisterMessage(Fraction)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+    DESCRIPTOR._options = None
+    DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\rFractionProtoP\001Z<google.golang.org/genproto/googleapis/type/fraction;fraction\242\002\003GTP"
+    _FRACTION._serialized_start = 43
+    _FRACTION._serialized_end = 93
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/type/latlng.proto b/Lib/site-packages/google/type/latlng.proto
new file mode 100644
--- /dev/null
+++ b/Lib/site-packages/google/type/latlng.proto
@@ -0,0 +1,36 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/type/latlng;latlng";
+option java_multiple_files = true;
+option java_outer_classname = "LatLngProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// An object that represents a latitude/longitude pair. This is expressed as a
+// pair of doubles to represent degrees latitude and degrees longitude. Unless
+// specified otherwise, this must conform to the WGS84
+// standard. Values must be within normalized ranges.
+message LatLng {
+ // The latitude in degrees. It must be in the range [-90.0, +90.0].
+ double latitude = 1;
+
+ // The longitude in degrees. It must be in the range [-180.0, +180.0].
+ double longitude = 2;
+}
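+
+// A minimal Python sketch of the range constraints above, assuming the
+// generated google.type.latlng_pb2 module is importable. The message itself
+// performs no validation, so clients should check the ranges:
+//
+//     from google.type import latlng_pb2
+//
+//     point = latlng_pb2.LatLng(latitude=-33.865143, longitude=151.209900)
+//     assert -90.0 <= point.latitude <= 90.0
+//     assert -180.0 <= point.longitude <= 180.0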
diff --git a/Lib/site-packages/google/type/latlng_pb2.py b/Lib/site-packages/google/type/latlng_pb2.py
new file mode 100644
index 0000000..953dc90
--- /dev/null
+++ b/Lib/site-packages/google/type/latlng_pb2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/latlng.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x18google/type/latlng.proto\x12\x0bgoogle.type"-\n\x06LatLng\x12\x10\n\x08latitude\x18\x01 \x01(\x01\x12\x11\n\tlongitude\x18\x02 \x01(\x01\x42\x63\n\x0f\x63om.google.typeB\x0bLatLngProtoP\x01Z8google.golang.org/genproto/googleapis/type/latlng;latlng\xf8\x01\x01\xa2\x02\x03GTPb\x06proto3'
+)
+
+
+_LATLNG = DESCRIPTOR.message_types_by_name["LatLng"]
+LatLng = _reflection.GeneratedProtocolMessageType(
+ "LatLng",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LATLNG,
+ "__module__": "google.type.latlng_pb2"
+ # @@protoc_insertion_point(class_scope:google.type.LatLng)
+ },
+)
+_sym_db.RegisterMessage(LatLng)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\013LatLngProtoP\001Z8google.golang.org/genproto/googleapis/type/latlng;latlng\370\001\001\242\002\003GTP"
+ _LATLNG._serialized_start = 41
+ _LATLNG._serialized_end = 86
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/type/localized_text.proto b/Lib/site-packages/google/type/localized_text.proto
new file mode 100644
index 0000000..5c6922b
--- /dev/null
+++ b/Lib/site-packages/google/type/localized_text.proto
@@ -0,0 +1,36 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/type/localized_text;localized_text";
+option java_multiple_files = true;
+option java_outer_classname = "LocalizedTextProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// Localized variant of a text in a particular language.
+message LocalizedText {
+ // Localized string in the language corresponding to `language_code` below.
+ string text = 1;
+
+ // The text's BCP-47 language code, such as "en-US" or "sr-Latn".
+ //
+ // For more information, see
+ // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ string language_code = 2;
+}
diff --git a/Lib/site-packages/google/type/localized_text_pb2.py b/Lib/site-packages/google/type/localized_text_pb2.py
new file mode 100644
index 0000000..c24a7d1
--- /dev/null
+++ b/Lib/site-packages/google/type/localized_text_pb2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/localized_text.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n google/type/localized_text.proto\x12\x0bgoogle.type"4\n\rLocalizedText\x12\x0c\n\x04text\x18\x01 \x01(\t\x12\x15\n\rlanguage_code\x18\x02 \x01(\tBz\n\x0f\x63om.google.typeB\x12LocalizedTextProtoP\x01ZHgoogle.golang.org/genproto/googleapis/type/localized_text;localized_text\xf8\x01\x01\xa2\x02\x03GTPb\x06proto3'
+)
+
+
+_LOCALIZEDTEXT = DESCRIPTOR.message_types_by_name["LocalizedText"]
+LocalizedText = _reflection.GeneratedProtocolMessageType(
+ "LocalizedText",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LOCALIZEDTEXT,
+ "__module__": "google.type.localized_text_pb2"
+ # @@protoc_insertion_point(class_scope:google.type.LocalizedText)
+ },
+)
+_sym_db.RegisterMessage(LocalizedText)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\022LocalizedTextProtoP\001ZHgoogle.golang.org/genproto/googleapis/type/localized_text;localized_text\370\001\001\242\002\003GTP"
+ _LOCALIZEDTEXT._serialized_start = 49
+ _LOCALIZEDTEXT._serialized_end = 101
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/type/money.proto b/Lib/site-packages/google/type/money.proto
new file mode 100644
index 0000000..98d6494
--- /dev/null
+++ b/Lib/site-packages/google/type/money.proto
@@ -0,0 +1,42 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/type/money;money";
+option java_multiple_files = true;
+option java_outer_classname = "MoneyProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// Represents an amount of money with its currency type.
+message Money {
+ // The three-letter currency code defined in ISO 4217.
+ string currency_code = 1;
+
+ // The whole units of the amount.
+ // For example if `currencyCode` is `"USD"`, then 1 unit is one US dollar.
+ int64 units = 2;
+
+ // Number of nano (10^-9) units of the amount.
+ // The value must be between -999,999,999 and +999,999,999 inclusive.
+ // If `units` is positive, `nanos` must be positive or zero.
+ // If `units` is zero, `nanos` can be positive, zero, or negative.
+ // If `units` is negative, `nanos` must be negative or zero.
+ // For example $-1.75 is represented as `units`=-1 and `nanos`=-750,000,000.
+ int32 nanos = 3;
+}
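+
+// A minimal Python sketch of the units/nanos split described above, assuming
+// the generated google.type.money_pb2 module is importable; to_money is a
+// hypothetical helper, not part of the generated API:
+//
+//     import decimal
+//     from google.type import money_pb2
+//
+//     def to_money(amount: str, currency: str) -> money_pb2.Money:
+//         """Split a decimal amount into same-signed units and nanos."""
+//         value = decimal.Decimal(amount)
+//         units = int(value)  # int() truncates toward zero
+//         nanos = int((value - units) * 10**9)
+//         return money_pb2.Money(
+//             currency_code=currency, units=units, nanos=nanos)
+//
+//     m = to_money("-1.75", "USD")
+//     assert (m.units, m.nanos) == (-1, -750000000)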
diff --git a/Lib/site-packages/google/type/money_pb2.py b/Lib/site-packages/google/type/money_pb2.py
new file mode 100644
index 0000000..f7a6fd8
--- /dev/null
+++ b/Lib/site-packages/google/type/money_pb2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/money.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x17google/type/money.proto\x12\x0bgoogle.type"<\n\x05Money\x12\x15\n\rcurrency_code\x18\x01 \x01(\t\x12\r\n\x05units\x18\x02 \x01(\x03\x12\r\n\x05nanos\x18\x03 \x01(\x05\x42`\n\x0f\x63om.google.typeB\nMoneyProtoP\x01Z6google.golang.org/genproto/googleapis/type/money;money\xf8\x01\x01\xa2\x02\x03GTPb\x06proto3'
+)
+
+
+_MONEY = DESCRIPTOR.message_types_by_name["Money"]
+Money = _reflection.GeneratedProtocolMessageType(
+ "Money",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _MONEY,
+ "__module__": "google.type.money_pb2"
+ # @@protoc_insertion_point(class_scope:google.type.Money)
+ },
+)
+_sym_db.RegisterMessage(Money)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\nMoneyProtoP\001Z6google.golang.org/genproto/googleapis/type/money;money\370\001\001\242\002\003GTP"
+ _MONEY._serialized_start = 40
+ _MONEY._serialized_end = 100
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/type/month.proto b/Lib/site-packages/google/type/month.proto
new file mode 100644
index 0000000..99e7551
--- /dev/null
+++ b/Lib/site-packages/google/type/month.proto
@@ -0,0 +1,65 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+option go_package = "google.golang.org/genproto/googleapis/type/month;month";
+option java_multiple_files = true;
+option java_outer_classname = "MonthProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// Represents a month in the Gregorian calendar.
+enum Month {
+ // The unspecified month.
+ MONTH_UNSPECIFIED = 0;
+
+ // The month of January.
+ JANUARY = 1;
+
+ // The month of February.
+ FEBRUARY = 2;
+
+ // The month of March.
+ MARCH = 3;
+
+ // The month of April.
+ APRIL = 4;
+
+ // The month of May.
+ MAY = 5;
+
+ // The month of June.
+ JUNE = 6;
+
+ // The month of July.
+ JULY = 7;
+
+ // The month of August.
+ AUGUST = 8;
+
+ // The month of September.
+ SEPTEMBER = 9;
+
+ // The month of October.
+ OCTOBER = 10;
+
+ // The month of November.
+ NOVEMBER = 11;
+
+ // The month of December.
+ DECEMBER = 12;
+}
diff --git a/Lib/site-packages/google/type/month_pb2.py b/Lib/site-packages/google/type/month_pb2.py
new file mode 100644
index 0000000..b8d161a
--- /dev/null
+++ b/Lib/site-packages/google/type/month_pb2.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/month.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b"\n\x17google/type/month.proto\x12\x0bgoogle.type*\xb0\x01\n\x05Month\x12\x15\n\x11MONTH_UNSPECIFIED\x10\x00\x12\x0b\n\x07JANUARY\x10\x01\x12\x0c\n\x08\x46\x45\x42RUARY\x10\x02\x12\t\n\x05MARCH\x10\x03\x12\t\n\x05\x41PRIL\x10\x04\x12\x07\n\x03MAY\x10\x05\x12\x08\n\x04JUNE\x10\x06\x12\x08\n\x04JULY\x10\x07\x12\n\n\x06\x41UGUST\x10\x08\x12\r\n\tSEPTEMBER\x10\t\x12\x0b\n\x07OCTOBER\x10\n\x12\x0c\n\x08NOVEMBER\x10\x0b\x12\x0c\n\x08\x44\x45\x43\x45MBER\x10\x0c\x42]\n\x0f\x63om.google.typeB\nMonthProtoP\x01Z6google.golang.org/genproto/googleapis/type/month;month\xa2\x02\x03GTPb\x06proto3"
+)
+
+_MONTH = DESCRIPTOR.enum_types_by_name["Month"]
+Month = enum_type_wrapper.EnumTypeWrapper(_MONTH)
+MONTH_UNSPECIFIED = 0
+JANUARY = 1
+FEBRUARY = 2
+MARCH = 3
+APRIL = 4
+MAY = 5
+JUNE = 6
+JULY = 7
+AUGUST = 8
+SEPTEMBER = 9
+OCTOBER = 10
+NOVEMBER = 11
+DECEMBER = 12
+
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\nMonthProtoP\001Z6google.golang.org/genproto/googleapis/type/month;month\242\002\003GTP"
+ _MONTH._serialized_start = 41
+ _MONTH._serialized_end = 217
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google/type/phone_number.proto b/Lib/site-packages/google/type/phone_number.proto
new file mode 100644
index 0000000..7bbb7d8
--- /dev/null
+++ b/Lib/site-packages/google/type/phone_number.proto
@@ -0,0 +1,113 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/type/phone_number;phone_number";
+option java_multiple_files = true;
+option java_outer_classname = "PhoneNumberProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// An object representing a phone number, suitable as an API wire format.
+//
+// This representation:
+//
+// - should not be used for locale-specific formatting of a phone number, such
+// as "+1 (650) 253-0000 ext. 123"
+//
+// - is not designed for efficient storage
+// - may not be suitable for dialing - specialized libraries (see references)
+// should be used to parse the number for that purpose
+//
+// To do something meaningful with this number, such as format it for various
+// use-cases, convert it to an `i18n.phonenumbers.PhoneNumber` object first.
+//
+// For instance, in Java this would be:
+//
+// com.google.type.PhoneNumber wireProto =
+// com.google.type.PhoneNumber.newBuilder().build();
+// com.google.i18n.phonenumbers.Phonenumber.PhoneNumber phoneNumber =
+// PhoneNumberUtil.getInstance().parse(wireProto.getE164Number(), "ZZ");
+// if (!wireProto.getExtension().isEmpty()) {
+// phoneNumber.setExtension(wireProto.getExtension());
+// }
+//
+// Reference(s):
+// - https://github.com/google/libphonenumber
+message PhoneNumber {
+ // An object representing a short code, which is a phone number that is
+ // typically much shorter than regular phone numbers and can be used to
+ // address messages in MMS and SMS systems, as well as for abbreviated dialing
+ // (e.g. "Text 611 to see how many minutes you have remaining on your plan.").
+ //
+ // Short codes are restricted to a region and are not internationally
+ // dialable, which means the same short code can exist in different regions,
+ // with different usage and pricing, even if those regions share the same
+ // country calling code (e.g. US and CA).
+ message ShortCode {
+ // Required. The BCP-47 region code of the location where calls to this
+ // short code can be made, such as "US" and "BB".
+ //
+ // Reference(s):
+ // - http://www.unicode.org/reports/tr35/#unicode_region_subtag
+ string region_code = 1;
+
+ // Required. The short code digits, without a leading plus ('+') or country
+ // calling code, e.g. "611".
+ string number = 2;
+ }
+
+ // Required. Either a regular number, or a short code. New fields may be
+ // added to the oneof below in the future, so clients should ignore phone
+ // numbers for which none of the fields they coded against are set.
+ oneof kind {
+ // The phone number, represented as a leading plus sign ('+'), followed by a
+ // phone number that uses a relaxed ITU E.164 format consisting of the
+ // country calling code (1 to 3 digits) and the subscriber number, with no
+ // additional spaces or formatting, e.g.:
+ // - correct: "+15552220123"
+ // - incorrect: "+1 (555) 222-01234 x123".
+ //
+ // The ITU E.164 format limits the latter to 12 digits, but in practice not
+ // all countries respect that, so we relax that restriction here.
+ // National-only numbers are not allowed.
+ //
+ // References:
+ // - https://www.itu.int/rec/T-REC-E.164-201011-I
+// - https://en.wikipedia.org/wiki/E.164
+ // - https://en.wikipedia.org/wiki/List_of_country_calling_codes
+ string e164_number = 1;
+
+ // A short code.
+ //
+ // Reference(s):
+ // - https://en.wikipedia.org/wiki/Short_code
+ ShortCode short_code = 2;
+ }
+
+ // The phone number's extension. The extension is not standardized in ITU
+ // recommendations, except for being defined as a series of numbers with a
+ // maximum length of 40 digits. Other than digits, some other dialing
+ // characters such as ',' (indicating a wait) or '#' may be stored here.
+ //
+ // Note that no regions currently use extensions with short codes, so this
+ // field is normally only set in conjunction with an E.164 number. It is held
+ // separately from the E.164 number to allow for short code extensions in the
+ // future.
+ string extension = 3;
+}
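
The comment block above describes populating either `e164_number` or `short_code` through the `kind` oneof. As a rough illustration, the generated Python module for this proto (added in the next file) might be used as below; this is a sketch, assuming the protobuf runtime is installed and `google.type.phone_number_pb2` is importable.

```python
from google.type import phone_number_pb2

msg = phone_number_pb2.PhoneNumber()
msg.e164_number = "+15552220123"  # relaxed E.164: '+', country code, subscriber digits
msg.extension = "123"             # not part of the oneof; digits plus ',' or '#'
assert msg.WhichOneof("kind") == "e164_number"

short = phone_number_pb2.PhoneNumber()
short.short_code.region_code = "US"  # assigning into short_code selects that oneof branch
short.short_code.number = "611"
assert short.WhichOneof("kind") == "short_code"
```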
diff --git a/Lib/site-packages/google/type/phone_number_pb2.py b/Lib/site-packages/google/type/phone_number_pb2.py
new file mode 100644
index 0000000..171ff5b
--- /dev/null
+++ b/Lib/site-packages/google/type/phone_number_pb2.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/phone_number.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x1egoogle/type/phone_number.proto\x12\x0bgoogle.type"\xab\x01\n\x0bPhoneNumber\x12\x15\n\x0b\x65\x31\x36\x34_number\x18\x01 \x01(\tH\x00\x12\x38\n\nshort_code\x18\x02 \x01(\x0b\x32".google.type.PhoneNumber.ShortCodeH\x00\x12\x11\n\textension\x18\x03 \x01(\t\x1a\x30\n\tShortCode\x12\x13\n\x0bregion_code\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\tB\x06\n\x04kindBt\n\x0f\x63om.google.typeB\x10PhoneNumberProtoP\x01ZDgoogle.golang.org/genproto/googleapis/type/phone_number;phone_number\xf8\x01\x01\xa2\x02\x03GTPb\x06proto3'
+)
+
+
+_PHONENUMBER = DESCRIPTOR.message_types_by_name["PhoneNumber"]
+_PHONENUMBER_SHORTCODE = _PHONENUMBER.nested_types_by_name["ShortCode"]
+PhoneNumber = _reflection.GeneratedProtocolMessageType(
+ "PhoneNumber",
+ (_message.Message,),
+ {
+ "ShortCode": _reflection.GeneratedProtocolMessageType(
+ "ShortCode",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _PHONENUMBER_SHORTCODE,
+ "__module__": "google.type.phone_number_pb2"
+ # @@protoc_insertion_point(class_scope:google.type.PhoneNumber.ShortCode)
+ },
+ ),
+ "DESCRIPTOR": _PHONENUMBER,
+ "__module__": "google.type.phone_number_pb2"
+ # @@protoc_insertion_point(class_scope:google.type.PhoneNumber)
+ },
+)
+_sym_db.RegisterMessage(PhoneNumber)
+_sym_db.RegisterMessage(PhoneNumber.ShortCode)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\020PhoneNumberProtoP\001ZDgoogle.golang.org/genproto/googleapis/type/phone_number;phone_number\370\001\001\242\002\003GTP"
+ _PHONENUMBER._serialized_start = 48
+ _PHONENUMBER._serialized_end = 219
+ _PHONENUMBER_SHORTCODE._serialized_start = 163
+ _PHONENUMBER_SHORTCODE._serialized_end = 211
+# @@protoc_insertion_point(module_scope)
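
This generated module builds the `PhoneNumber` class at import time from the serialized descriptor via `_reflection.GeneratedProtocolMessageType`; callers only ever touch the resulting class. A small round-trip sketch, assuming the protobuf runtime is available:

```python
from google.type import phone_number_pb2

wire = phone_number_pb2.PhoneNumber(e164_number="+15552220123", extension="123")
payload = wire.SerializeToString()  # compact binary wire format

parsed = phone_number_pb2.PhoneNumber()
parsed.ParseFromString(payload)     # reconstructs an equal message
assert parsed.e164_number == "+15552220123"
```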
diff --git a/Lib/site-packages/google/type/postal_address.proto b/Lib/site-packages/google/type/postal_address.proto
new file mode 100644
index 0000000..c57c7c3
--- /dev/null
+++ b/Lib/site-packages/google/type/postal_address.proto
@@ -0,0 +1,134 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/type/postaladdress;postaladdress";
+option java_multiple_files = true;
+option java_outer_classname = "PostalAddressProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// Represents a postal address, e.g. for postal delivery or payments addresses.
+// Given a postal address, a postal service can deliver items to a premise, P.O.
+// Box or similar.
+// It is not intended to model geographical locations (roads, towns,
+// mountains).
+//
+// In typical usage an address would be created via user input or from importing
+// existing data, depending on the type of process.
+//
+// Advice on address input / editing:
+// - Use an i18n-ready address widget such as
+// https://github.com/google/libaddressinput
+// - Users should not be presented with UI elements for input or editing of
+// fields outside countries where that field is used.
+//
+// For more guidance on how to use this schema, please see:
+// https://support.google.com/business/answer/6397478
+message PostalAddress {
+ // The schema revision of the `PostalAddress`. This must be set to 0, which is
+ // the latest revision.
+ //
+ // All new revisions **must** be backward compatible with old revisions.
+ int32 revision = 1;
+
+ // Required. CLDR region code of the country/region of the address. This
+ // is never inferred and it is up to the user to ensure the value is
+ // correct. See http://cldr.unicode.org/ and
+ // http://www.unicode.org/cldr/charts/30/supplemental/territory_information.html
+ // for details. Example: "CH" for Switzerland.
+ string region_code = 2;
+
+ // Optional. BCP-47 language code of the contents of this address (if
+ // known). This is often the UI language of the input form or is expected
+ // to match one of the languages used in the address' country/region, or their
+ // transliterated equivalents.
+ // This can affect formatting in certain countries, but is not critical
+ // to the correctness of the data and will never affect any validation or
+ // other non-formatting related operations.
+ //
+ // If this value is not known, it should be omitted (rather than specifying a
+ // possibly incorrect default).
+ //
+ // Examples: "zh-Hant", "ja", "ja-Latn", "en".
+ string language_code = 3;
+
+ // Optional. Postal code of the address. Not all countries use or require
+ // postal codes to be present, but where they are used, they may trigger
+ // additional validation with other parts of the address (e.g. state/zip
+ // validation in the U.S.A.).
+ string postal_code = 4;
+
+ // Optional. Additional, country-specific, sorting code. This is not used
+ // in most regions. Where it is used, the value is either a string like
+ // "CEDEX", optionally followed by a number (e.g. "CEDEX 7"), or just a number
+ // alone, representing the "sector code" (Jamaica), "delivery area indicator"
+ // (Malawi) or "post office indicator" (e.g. Côte d'Ivoire).
+ string sorting_code = 5;
+
+ // Optional. Highest administrative subdivision which is used for postal
+ // addresses of a country or region.
+ // For example, this can be a state, a province, an oblast, or a prefecture.
+ // Specifically, for Spain this is the province and not the autonomous
+ // community (e.g. "Barcelona" and not "Catalonia").
+ // Many countries don't use an administrative area in postal addresses. E.g.
+ // in Switzerland this should be left unpopulated.
+ string administrative_area = 6;
+
+ // Optional. Generally refers to the city/town portion of the address.
+ // Examples: US city, IT comune, UK post town.
+ // In regions of the world where localities are not well defined or do not fit
+ // into this structure well, leave locality empty and use address_lines.
+ string locality = 7;
+
+ // Optional. Sublocality of the address.
+ // For example, this can be neighborhoods, boroughs, districts.
+ string sublocality = 8;
+
+ // Unstructured address lines describing the lower levels of an address.
+ //
+ // Because values in address_lines do not have type information and may
+ // sometimes contain multiple values in a single field (e.g.
+ // "Austin, TX"), it is important that the line order is clear. The order of
+ // address lines should be "envelope order" for the country/region of the
+ // address. In places where this can vary (e.g. Japan), address_language is
+ // used to make it explicit (e.g. "ja" for large-to-small ordering and
+ // "ja-Latn" or "en" for small-to-large). This way, the most specific line of
+ // an address can be selected based on the language.
+ //
+ // The minimum permitted structural representation of an address consists
+ // of a region_code with all remaining information placed in the
+ // address_lines. It would be possible to format such an address very
+ // approximately without geocoding, but no semantic reasoning could be
+ // made about any of the address components until it was at least
+ // partially resolved.
+ //
+ // Creating an address only containing a region_code and address_lines, and
+ // then geocoding is the recommended way to handle completely unstructured
+ // addresses (as opposed to guessing which parts of the address should be
+ // localities or administrative areas).
+ repeated string address_lines = 9;
+
+ // Optional. The recipient at the address.
+ // This field may, under certain circumstances, contain multiline information.
+ // For example, it might contain "care of" information.
+ repeated string recipients = 10;
+
+ // Optional. The name of the organization at the address.
+ string organization = 11;
+}
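
The `address_lines` comment above recommends the minimum structural representation, just `region_code` plus free-form lines, for completely unstructured input. A sketch using the generated module from the next file (the street address here is hypothetical):

```python
from google.type import postal_address_pb2

addr = postal_address_pb2.PostalAddress(
    revision=0,          # must be 0, the latest schema revision
    region_code="CH",    # required CLDR region code; never inferred
    address_lines=[      # "envelope order" for the region
        "Seestrasse 1",  # hypothetical street line
        "8002 Zurich",
    ],
)
```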
diff --git a/Lib/site-packages/google/type/postal_address_pb2.py b/Lib/site-packages/google/type/postal_address_pb2.py
new file mode 100644
index 0000000..cbd8fbf
--- /dev/null
+++ b/Lib/site-packages/google/type/postal_address_pb2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/postal_address.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n google/type/postal_address.proto\x12\x0bgoogle.type"\xfd\x01\n\rPostalAddress\x12\x10\n\x08revision\x18\x01 \x01(\x05\x12\x13\n\x0bregion_code\x18\x02 \x01(\t\x12\x15\n\rlanguage_code\x18\x03 \x01(\t\x12\x13\n\x0bpostal_code\x18\x04 \x01(\t\x12\x14\n\x0csorting_code\x18\x05 \x01(\t\x12\x1b\n\x13\x61\x64ministrative_area\x18\x06 \x01(\t\x12\x10\n\x08locality\x18\x07 \x01(\t\x12\x13\n\x0bsublocality\x18\x08 \x01(\t\x12\x15\n\raddress_lines\x18\t \x03(\t\x12\x12\n\nrecipients\x18\n \x03(\t\x12\x14\n\x0corganization\x18\x0b \x01(\tBx\n\x0f\x63om.google.typeB\x12PostalAddressProtoP\x01ZFgoogle.golang.org/genproto/googleapis/type/postaladdress;postaladdress\xf8\x01\x01\xa2\x02\x03GTPb\x06proto3'
+)
+
+
+_POSTALADDRESS = DESCRIPTOR.message_types_by_name["PostalAddress"]
+PostalAddress = _reflection.GeneratedProtocolMessageType(
+ "PostalAddress",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _POSTALADDRESS,
+ "__module__": "google.type.postal_address_pb2"
+ # @@protoc_insertion_point(class_scope:google.type.PostalAddress)
+ },
+)
+_sym_db.RegisterMessage(PostalAddress)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\022PostalAddressProtoP\001ZFgoogle.golang.org/genproto/googleapis/type/postaladdress;postaladdress\370\001\001\242\002\003GTP"
+ _POSTALADDRESS._serialized_start = 50
+ _POSTALADDRESS._serialized_end = 303
+# @@protoc_insertion_point(module_scope)
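
Generated messages like this one also participate in protobuf's standard JSON mapping, which renders field names in camelCase; a sketch, assuming `google.protobuf.json_format` from the protobuf runtime:

```python
from google.protobuf import json_format
from google.type import postal_address_pb2

addr = postal_address_pb2.PostalAddress(region_code="US", postal_code="94043")
print(json_format.MessageToJson(addr))
# {
#   "regionCode": "US",
#   "postalCode": "94043"
# }
```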
diff --git a/Lib/site-packages/google/type/quaternion.proto b/Lib/site-packages/google/type/quaternion.proto
new file mode 100644
index 0000000..dfb822d
--- /dev/null
+++ b/Lib/site-packages/google/type/quaternion.proto
@@ -0,0 +1,94 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/type/quaternion;quaternion";
+option java_multiple_files = true;
+option java_outer_classname = "QuaternionProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// A quaternion is defined as the quotient of two directed lines in a
+// three-dimensional space or equivalently as the quotient of two Euclidean
+// vectors (https://en.wikipedia.org/wiki/Quaternion).
+//
+// Quaternions are often used in calculations involving three-dimensional
+// rotations (https://en.wikipedia.org/wiki/Quaternions_and_spatial_rotation),
+// as they provide greater mathematical robustness by avoiding the gimbal lock
+// problems that can be encountered when using Euler angles
+// (https://en.wikipedia.org/wiki/Gimbal_lock).
+//
+// Quaternions are generally represented in this form:
+//
+// w + xi + yj + zk
+//
+// where x, y, z, and w are real numbers, and i, j, and k are three imaginary
+// numbers.
+//
+// Our naming choice `(x, y, z, w)` comes from the desire to avoid confusion for
+// those interested in the geometric properties of the quaternion in the 3D
+// Cartesian space. Other texts often use alternative names or subscripts, such
+// as `(a, b, c, d)`, `(1, i, j, k)`, or `(0, 1, 2, 3)`, which are perhaps
+// better suited for mathematical interpretations.
+//
+// To avoid any confusion, as well as to maintain compatibility with a large
+// number of software libraries, the quaternions represented using the protocol
+// buffer below *must* follow the Hamilton convention, which defines `ij = k`
+// (i.e. a right-handed algebra), and therefore:
+//
+// i^2 = j^2 = k^2 = ijk = −1
+// ij = −ji = k
+// jk = −kj = i
+// ki = −ik = j
+//
+// Please DO NOT use this to represent quaternions that follow the JPL
+// convention, or any of the other quaternion flavors out there.
+//
+// Definitions:
+//
+// - Quaternion norm (or magnitude): `sqrt(x^2 + y^2 + z^2 + w^2)`.
+// - Unit (or normalized) quaternion: a quaternion whose norm is 1.
+// - Pure quaternion: a quaternion whose scalar component (`w`) is 0.
+// - Rotation quaternion: a unit quaternion used to represent rotation.
+// - Orientation quaternion: a unit quaternion used to represent orientation.
+//
+// A quaternion can be normalized by dividing it by its norm. The resulting
+// quaternion maintains the same direction, but has a norm of 1, i.e. it moves
+// on the unit sphere. This is generally necessary for rotation and orientation
+// quaternions, to avoid rounding errors:
+// https://en.wikipedia.org/wiki/Rotation_formalisms_in_three_dimensions
+//
+// Note that `(x, y, z, w)` and `(-x, -y, -z, -w)` represent the same rotation,
+// but normalization would be even more useful, e.g. for comparison purposes, if
+// it produced a unique representation. It is thus recommended that `w` be
+// kept positive, which can be achieved by changing all the signs when `w` is
+// negative.
+//
+message Quaternion {
+ // The x component.
+ double x = 1;
+
+ // The y component.
+ double y = 2;
+
+ // The z component.
+ double z = 3;
+
+ // The scalar component.
+ double w = 4;
+}
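
The definitions above pin down the norm and the recommended sign convention; written out as a small helper (a sketch, not part of the vendored files), normalization of a nonzero quaternion looks like this:

```python
import math

def normalized(x: float, y: float, z: float, w: float) -> tuple:
    """Scale a nonzero quaternion to unit norm, keeping w non-negative."""
    norm = math.sqrt(x * x + y * y + z * z + w * w)  # quaternion norm
    x, y, z, w = x / norm, y / norm, z / norm, w / norm
    if w < 0:  # (x, y, z, w) and (-x, -y, -z, -w) encode the same rotation
        x, y, z, w = -x, -y, -z, -w
    return (x, y, z, w)

assert normalized(0.0, 0.0, 0.0, -2.0) == (0.0, 0.0, 0.0, 1.0)
```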
diff --git a/Lib/site-packages/google/type/quaternion_pb2.py b/Lib/site-packages/google/type/quaternion_pb2.py
new file mode 100644
index 0000000..78a843b
--- /dev/null
+++ b/Lib/site-packages/google/type/quaternion_pb2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/quaternion.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x1cgoogle/type/quaternion.proto\x12\x0bgoogle.type"8\n\nQuaternion\x12\t\n\x01x\x18\x01 \x01(\x01\x12\t\n\x01y\x18\x02 \x01(\x01\x12\t\n\x01z\x18\x03 \x01(\x01\x12\t\n\x01w\x18\x04 \x01(\x01\x42o\n\x0f\x63om.google.typeB\x0fQuaternionProtoP\x01Z@google.golang.org/genproto/googleapis/type/quaternion;quaternion\xf8\x01\x01\xa2\x02\x03GTPb\x06proto3'
+)
+
+
+_QUATERNION = DESCRIPTOR.message_types_by_name["Quaternion"]
+Quaternion = _reflection.GeneratedProtocolMessageType(
+ "Quaternion",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _QUATERNION,
+ "__module__": "google.type.quaternion_pb2"
+ # @@protoc_insertion_point(class_scope:google.type.Quaternion)
+ },
+)
+_sym_db.RegisterMessage(Quaternion)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\017QuaternionProtoP\001Z@google.golang.org/genproto/googleapis/type/quaternion;quaternion\370\001\001\242\002\003GTP"
+ _QUATERNION._serialized_start = 45
+ _QUATERNION._serialized_end = 101
+# @@protoc_insertion_point(module_scope)
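
As a sanity check on the Hamilton convention mandated above (`ij = k`, a right-handed algebra), the quaternion product can be spelled out over plain `(x, y, z, w)` tuples; an illustrative sketch only:

```python
def hamilton_mul(a, b):
    """Hamilton product of quaternions given as (x, y, z, w) tuples."""
    ax, ay, az, aw = a
    bx, by, bz, bw = b
    return (
        aw * bx + ax * bw + ay * bz - az * by,  # x
        aw * by - ax * bz + ay * bw + az * bx,  # y
        aw * bz + ax * by - ay * bx + az * bw,  # z
        aw * bw - ax * bx - ay * by - az * bz,  # w (scalar)
    )

i, j, k = (1, 0, 0, 0), (0, 1, 0, 0), (0, 0, 1, 0)
assert hamilton_mul(i, j) == k               # ij = k
assert hamilton_mul(j, i) == (0, 0, -1, 0)   # ji = -k
```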
diff --git a/Lib/site-packages/google/type/timeofday.proto b/Lib/site-packages/google/type/timeofday.proto
new file mode 100644
index 0000000..5cb48aa
--- /dev/null
+++ b/Lib/site-packages/google/type/timeofday.proto
@@ -0,0 +1,44 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.type;
+
+option cc_enable_arenas = true;
+option go_package = "google.golang.org/genproto/googleapis/type/timeofday;timeofday";
+option java_multiple_files = true;
+option java_outer_classname = "TimeOfDayProto";
+option java_package = "com.google.type";
+option objc_class_prefix = "GTP";
+
+// Represents a time of day. The date and time zone are either not significant
+// or are specified elsewhere. An API may choose to allow leap seconds. Related
+// types are [google.type.Date][google.type.Date] and
+// `google.protobuf.Timestamp`.
+message TimeOfDay {
+ // Hours of day in 24 hour format. Should be from 0 to 23. An API may choose
+ // to allow the value "24:00:00" for scenarios like business closing time.
+ int32 hours = 1;
+
+ // Minutes of hour of day. Must be from 0 to 59.
+ int32 minutes = 2;
+
+ // Seconds of the minute. Must normally be from 0 to 59. An API may
+ // allow the value 60 if it allows leap-seconds.
+ int32 seconds = 3;
+
+ // Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.
+ int32 nanos = 4;
+}
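
The field comments above carry range rules plus two opt-in allowances (leap seconds, and "24:00:00" as a closing time). Below is a small validation helper capturing them; a sketch, not part of the vendored files:

```python
def is_valid_time_of_day(hours: int, minutes: int, seconds: int, nanos: int,
                         allow_leap_second: bool = False,
                         allow_24: bool = False) -> bool:
    if hours == 24 and allow_24:
        return minutes == seconds == nanos == 0  # "24:00:00" closing time
    max_seconds = 60 if allow_leap_second else 59  # 60 only for leap seconds
    return (0 <= hours <= 23 and 0 <= minutes <= 59
            and 0 <= seconds <= max_seconds and 0 <= nanos <= 999_999_999)

assert is_valid_time_of_day(23, 59, 60, 0, allow_leap_second=True)
assert not is_valid_time_of_day(24, 0, 0, 0)
```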
diff --git a/Lib/site-packages/google/type/timeofday_pb2.py b/Lib/site-packages/google/type/timeofday_pb2.py
new file mode 100644
index 0000000..64d8ab1
--- /dev/null
+++ b/Lib/site-packages/google/type/timeofday_pb2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/type/timeofday.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n\x1bgoogle/type/timeofday.proto\x12\x0bgoogle.type"K\n\tTimeOfDay\x12\r\n\x05hours\x18\x01 \x01(\x05\x12\x0f\n\x07minutes\x18\x02 \x01(\x05\x12\x0f\n\x07seconds\x18\x03 \x01(\x05\x12\r\n\x05nanos\x18\x04 \x01(\x05\x42l\n\x0f\x63om.google.typeB\x0eTimeOfDayProtoP\x01Z>google.golang.org/genproto/googleapis/type/timeofday;timeofday\xf8\x01\x01\xa2\x02\x03GTPb\x06proto3'
+)
+
+
+_TIMEOFDAY = DESCRIPTOR.message_types_by_name["TimeOfDay"]
+TimeOfDay = _reflection.GeneratedProtocolMessageType(
+ "TimeOfDay",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _TIMEOFDAY,
+ "__module__": "google.type.timeofday_pb2"
+ # @@protoc_insertion_point(class_scope:google.type.TimeOfDay)
+ },
+)
+_sym_db.RegisterMessage(TimeOfDay)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b"\n\017com.google.typeB\016TimeOfDayProtoP\001Z>google.golang.org/genproto/googleapis/type/timeofday;timeofday\370\001\001\242\002\003GTP"
+ _TIMEOFDAY._serialized_start = 44
+ _TIMEOFDAY._serialized_end = 119
+# @@protoc_insertion_point(module_scope)
diff --git a/Lib/site-packages/google_api_core-2.17.0.dist-info/INSTALLER b/Lib/site-packages/google_api_core-2.17.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/google_api_core-2.17.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/google_api_core-2.17.0.dist-info/LICENSE b/Lib/site-packages/google_api_core-2.17.0.dist-info/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/Lib/site-packages/google_api_core-2.17.0.dist-info/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/Lib/site-packages/google_api_core-2.17.0.dist-info/METADATA b/Lib/site-packages/google_api_core-2.17.0.dist-info/METADATA
new file mode 100644
index 0000000..bedaf76
--- /dev/null
+++ b/Lib/site-packages/google_api_core-2.17.0.dist-info/METADATA
@@ -0,0 +1,69 @@
+Metadata-Version: 2.1
+Name: google-api-core
+Version: 2.17.0
+Summary: Google API client core library
+Home-page: https://github.com/googleapis/python-api-core
+Author: Google LLC
+Author-email: googleapis-packages@google.com
+License: Apache 2.0
+Platform: Posix; MacOS X; Windows
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet
+Requires-Python: >=3.7
+License-File: LICENSE
+Requires-Dist: googleapis-common-protos <2.0.dev0,>=1.56.2
+Requires-Dist: protobuf !=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0.dev0,>=3.19.5
+Requires-Dist: google-auth <3.0.dev0,>=2.14.1
+Requires-Dist: requests <3.0.0.dev0,>=2.18.0
+Provides-Extra: grpc
+Requires-Dist: grpcio <2.0dev,>=1.33.2 ; extra == 'grpc'
+Requires-Dist: grpcio-status <2.0.dev0,>=1.33.2 ; extra == 'grpc'
+Requires-Dist: grpcio <2.0dev,>=1.49.1 ; (python_version >= "3.11") and extra == 'grpc'
+Requires-Dist: grpcio-status <2.0.dev0,>=1.49.1 ; (python_version >= "3.11") and extra == 'grpc'
+Provides-Extra: grpcgcp
+Requires-Dist: grpcio-gcp <1.0.dev0,>=0.2.2 ; extra == 'grpcgcp'
+Provides-Extra: grpcio-gcp
+Requires-Dist: grpcio-gcp <1.0.dev0,>=0.2.2 ; extra == 'grpcio-gcp'
+
+Core Library for Google Client Libraries
+========================================
+
+|pypi| |versions|
+
+This library is not meant to stand alone. Instead, it defines
+common helpers used by all Google API clients. For more information, see the
+`documentation`_.
+
+.. |pypi| image:: https://img.shields.io/pypi/v/google-api_core.svg
+ :target: https://pypi.org/project/google-api_core/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-api_core.svg
+ :target: https://pypi.org/project/google-api_core/
+.. _documentation: https://googleapis.dev/python/google-api-core/latest
+
+
+Supported Python Versions
+-------------------------
+Python >= 3.7
+
+
+Unsupported Python Versions
+---------------------------
+
+Python == 2.7, Python == 3.5, Python == 3.6.
+
+The last version of this library compatible with Python 2.7 and 3.5 is
+`google-api-core==1.31.1`.
+
+The last version of this library compatible with Python 3.6 is
+`google-api-core==2.8.2`.
diff --git a/Lib/site-packages/google_api_core-2.17.0.dist-info/RECORD b/Lib/site-packages/google_api_core-2.17.0.dist-info/RECORD
new file mode 100644
index 0000000..65bea9f
--- /dev/null
+++ b/Lib/site-packages/google_api_core-2.17.0.dist-info/RECORD
@@ -0,0 +1,106 @@
+google/api_core/__init__.py,sha256=bCgLRZtOkaVlSxTPG_o1x4V0w5FJAWREIlnq3kCfqeY,782
+google/api_core/__pycache__/__init__.cpython-312.pyc,,
+google/api_core/__pycache__/bidi.cpython-312.pyc,,
+google/api_core/__pycache__/client_info.cpython-312.pyc,,
+google/api_core/__pycache__/client_options.cpython-312.pyc,,
+google/api_core/__pycache__/datetime_helpers.cpython-312.pyc,,
+google/api_core/__pycache__/exceptions.cpython-312.pyc,,
+google/api_core/__pycache__/extended_operation.cpython-312.pyc,,
+google/api_core/__pycache__/general_helpers.cpython-312.pyc,,
+google/api_core/__pycache__/grpc_helpers.cpython-312.pyc,,
+google/api_core/__pycache__/grpc_helpers_async.cpython-312.pyc,,
+google/api_core/__pycache__/iam.cpython-312.pyc,,
+google/api_core/__pycache__/operation.cpython-312.pyc,,
+google/api_core/__pycache__/operation_async.cpython-312.pyc,,
+google/api_core/__pycache__/page_iterator.cpython-312.pyc,,
+google/api_core/__pycache__/page_iterator_async.cpython-312.pyc,,
+google/api_core/__pycache__/path_template.cpython-312.pyc,,
+google/api_core/__pycache__/protobuf_helpers.cpython-312.pyc,,
+google/api_core/__pycache__/rest_helpers.cpython-312.pyc,,
+google/api_core/__pycache__/rest_streaming.cpython-312.pyc,,
+google/api_core/__pycache__/retry_async.cpython-312.pyc,,
+google/api_core/__pycache__/timeout.cpython-312.pyc,,
+google/api_core/__pycache__/version.cpython-312.pyc,,
+google/api_core/bidi.py,sha256=wpC9Lthrh62Jxp9WzRas1W78UiHO5OPuHEvx3SHcvgg,27940
+google/api_core/client_info.py,sha256=MQse7unHD1nxqvEQPEMZyr4BH8oRHb9p2DE5dPXfKY0,3744
+google/api_core/client_options.py,sha256=g6PmyvmDOpnOHdQi7ZUjf1Pxq4ikUchvZrd31jYf2x4,5449
+google/api_core/datetime_helpers.py,sha256=5gFi7n0r-xVImQdj6rQKNwk58m2LcMF9WliXGHbBsDA,9034
+google/api_core/exceptions.py,sha256=9OPxGWIA9dsU4Ba2lkQ16sOmvja7Y_sCxt7Tlagr0k0,19326
+google/api_core/extended_operation.py,sha256=r9xSOblNF35lwn2hrrjUQ-f3JDoo0a4Z8xwOy_VkvL0,8632
+google/api_core/future/__init__.py,sha256=7sToxNNu9c_xqcpmO8dbrcSLOOxplnYOOSXjOX9QIXw,702
+google/api_core/future/__pycache__/__init__.cpython-312.pyc,,
+google/api_core/future/__pycache__/_helpers.cpython-312.pyc,,
+google/api_core/future/__pycache__/async_future.cpython-312.pyc,,
+google/api_core/future/__pycache__/base.cpython-312.pyc,,
+google/api_core/future/__pycache__/polling.cpython-312.pyc,,
+google/api_core/future/_helpers.py,sha256=jA6m2L1aqlOJA-9NdC1BDosPksZQ7FmLLYWDOrsQOPc,1248
+google/api_core/future/async_future.py,sha256=7rOK0tzud8MCoUwO9AjF-3OQDtELwhtp2ONltSB3GEI,5355
+google/api_core/future/base.py,sha256=SHyudamSWR7EyUsYaQ-XrGGkLeYClSfXfsHIHSqDIYI,1763
+google/api_core/future/polling.py,sha256=0HUw1bp7ZLgEqMtwsvxIXNMHQbHgsP6TpmpVrMbjJ2I,14349
+google/api_core/gapic_v1/__init__.py,sha256=r6kCwKznSXPTYRdz4C384fscefaw_rXP2bzJdnzEVnw,988
+google/api_core/gapic_v1/__pycache__/__init__.cpython-312.pyc,,
+google/api_core/gapic_v1/__pycache__/client_info.cpython-312.pyc,,
+google/api_core/gapic_v1/__pycache__/config.cpython-312.pyc,,
+google/api_core/gapic_v1/__pycache__/config_async.cpython-312.pyc,,
+google/api_core/gapic_v1/__pycache__/method.cpython-312.pyc,,
+google/api_core/gapic_v1/__pycache__/method_async.cpython-312.pyc,,
+google/api_core/gapic_v1/__pycache__/routing_header.cpython-312.pyc,,
+google/api_core/gapic_v1/client_info.py,sha256=98rwKt-s_jk7ez0KUKEJoARR1RWHClP6NLOAAGon7M4,2215
+google/api_core/gapic_v1/config.py,sha256=5isOOYPSZCXpDcJDJiwmTxGTUo0RjxJJvW2yjqBR4BI,6300
+google/api_core/gapic_v1/config_async.py,sha256=_jrB5Yv6rxxSU6KwzOxWQ-G_x5mXilpSFAgnQ_6ktrU,1728
+google/api_core/gapic_v1/method.py,sha256=SnMqRoKKCRph9xpnQvQ29SGjCd9WVpHEPK60X-uPyWM,9494
+google/api_core/gapic_v1/method_async.py,sha256=f307YYgYYvjQBF5So9S1rP8E7DubCbmhJw5EbkHwMA4,1952
+google/api_core/gapic_v1/routing_header.py,sha256=kJKOYpNS2mgSZa4Qt8Ib2Q5ONfNwpJwbNloVJ8e2wMs,3093
+google/api_core/general_helpers.py,sha256=ZrYwDg7VTgtaQlFk_fCeFTKYZD62JMQdZRhbQhbQL_c,681
+google/api_core/grpc_helpers.py,sha256=h07x2ZQIXDfc1pyamvRRcEnYd9fwL2Kx9lGCrS_BFdQ,23013
+google/api_core/grpc_helpers_async.py,sha256=1ewS441H6o0tvgdgfanL8y9STOseV3evnvstk7kDy-4,12344
+google/api_core/iam.py,sha256=BGz63HtOP5_5oH9Zs93RP0Y6Qshty2eOhFEYj_CoE64,13213
+google/api_core/operation.py,sha256=mHWay2vrNbEliv5YWFzyXBywbQdy_VPW98BALh514PA,13198
+google/api_core/operation_async.py,sha256=XdunwVY6aKA-K0OK-5_dYbqjbvF1DLTYUUL4IOztld4,8046
+google/api_core/operations_v1/__init__.py,sha256=lC3JHtOPlhKWHIPXOEqJH2QrJmMjC3vWQjMuuA5VxfQ,1126
+google/api_core/operations_v1/__pycache__/__init__.cpython-312.pyc,,
+google/api_core/operations_v1/__pycache__/abstract_operations_client.cpython-312.pyc,,
+google/api_core/operations_v1/__pycache__/operations_async_client.cpython-312.pyc,,
+google/api_core/operations_v1/__pycache__/operations_client.cpython-312.pyc,,
+google/api_core/operations_v1/__pycache__/operations_client_config.cpython-312.pyc,,
+google/api_core/operations_v1/__pycache__/pagers.cpython-312.pyc,,
+google/api_core/operations_v1/abstract_operations_client.py,sha256=SWIgFbnAQhcLkUCV1hPAPXAzP4CvbICe5sLDpxXak_s,24551
+google/api_core/operations_v1/operations_async_client.py,sha256=1BENex2y2ovlCHlXR4v5Cfiqk2o36DBWEzPyCCCudbU,14794
+google/api_core/operations_v1/operations_client.py,sha256=-fmbRv_2L_5cJv70WfybRw9EUyLlHB-wTbC-n0Iq4Fg,15274
+google/api_core/operations_v1/operations_client_config.py,sha256=v7B0FiVc5p9HhnpPY1_3FIomFdA-J-4lilomeoC9SkQ,2285
+google/api_core/operations_v1/pagers.py,sha256=BxwldJRuw3HRRjUzCdGI5OUjL7Z-p0iRPKXkjrh7V5A,3143
+google/api_core/operations_v1/transports/__init__.py,sha256=bIzQO8Dy86kAz1OZYu2qDOvdto6vkHcLXiUrGEHot6g,918
+google/api_core/operations_v1/transports/__pycache__/__init__.cpython-312.pyc,,
+google/api_core/operations_v1/transports/__pycache__/base.cpython-312.pyc,,
+google/api_core/operations_v1/transports/__pycache__/rest.cpython-312.pyc,,
+google/api_core/operations_v1/transports/base.py,sha256=0JbrFJCRiAK0333p937vkUJB5J61jN6Q8A04Vrty2Ws,8594
+google/api_core/operations_v1/transports/rest.py,sha256=1Ce15zqU3dKRvRVEw84bpjcMiFEW3bW-G_fCTcN6AAY,19584
+google/api_core/page_iterator.py,sha256=FXMfqbhlVYAEVjpojytYAiUluVNYAVSC41MdfAhHAX4,20330
+google/api_core/page_iterator_async.py,sha256=TbuXorRhP1wcQTD3raBJhWgSJP1JwJO_nCKJphCbVdw,10294
+google/api_core/path_template.py,sha256=Lyqqw8OECuw5O7y9x1BJvfNbYEbmx4lnTGqc6opSyHk,11685
+google/api_core/protobuf_helpers.py,sha256=uGl-E4EuDgw5ahHCZ_UiadxOLz5xGBcLMU_ECvdq5f4,12430
+google/api_core/py.typed,sha256=q8dgH9l1moUXiufHBVjqI0MuJy4Be9a3rNH8Zl_sICA,78
+google/api_core/rest_helpers.py,sha256=2DsInZiHv0sLd9dfLIbEL2vDJQIybWgxlkxnNFahPnI,3529
+google/api_core/rest_streaming.py,sha256=vbbwsYA8GcI_GUSntmE74LRrsZ4brn0QSflHEsfO2j8,4130
+google/api_core/retry/__init__.py,sha256=WhgtLBQO2oK-AehH_AHbGbfWo1IdG5ahUGrs3aFGw0o,2088
+google/api_core/retry/__pycache__/__init__.cpython-312.pyc,,
+google/api_core/retry/__pycache__/retry_base.cpython-312.pyc,,
+google/api_core/retry/__pycache__/retry_streaming.cpython-312.pyc,,
+google/api_core/retry/__pycache__/retry_streaming_async.cpython-312.pyc,,
+google/api_core/retry/__pycache__/retry_unary.cpython-312.pyc,,
+google/api_core/retry/__pycache__/retry_unary_async.cpython-312.pyc,,
+google/api_core/retry/retry_base.py,sha256=WJtEMphRNm1c3ldnuTjCzVu3Xzm-NwjmZhnULpHGVBM,12433
+google/api_core/retry/retry_streaming.py,sha256=kt5ucutVWGYzIsBugqag2DKBwucBmRtkKUvvlKHQ5ew,10860
+google/api_core/retry/retry_streaming_async.py,sha256=7liXfL9o9X9BvDxwFoHdgcClQJq8A9l_Ir5Z_gN4ZvA,14343
+google/api_core/retry/retry_unary.py,sha256=k_0PGsmFwRe4EPUMRHfzrZatDTFwEp9ucPRZDtKKGm4,13338
+google/api_core/retry/retry_unary_async.py,sha256=9mQgLnDqRNBb0enszfE7suqy-5R_C0PQlV3y-s2q_Hw,9415
+google/api_core/retry_async.py,sha256=_r0ROYeQqdATtRMx-q_6o4bPmqFzPyjr_oV3lfloDSM,1514
+google/api_core/timeout.py,sha256=LnElJPn1XJuU8dNZl0YKxw8wqQbGGV94t5K1NiXOGk0,9685
+google/api_core/version.py,sha256=SlP5h60bDPcjVbwPW9SAhEc5clzxZqOU1xEitFwihV8,598
+google_api_core-2.17.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_api_core-2.17.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+google_api_core-2.17.0.dist-info/METADATA,sha256=EH7aK8_p26Gdy6cfktRshbc7QRjTXDMjsn4aS0YHo0I,2684
+google_api_core-2.17.0.dist-info/RECORD,,
+google_api_core-2.17.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google_api_core-2.17.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
+google_api_core-2.17.0.dist-info/top_level.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7
diff --git a/Lib/site-packages/google_api_core-2.17.0.dist-info/REQUESTED b/Lib/site-packages/google_api_core-2.17.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/google_api_core-2.17.0.dist-info/WHEEL b/Lib/site-packages/google_api_core-2.17.0.dist-info/WHEEL
new file mode 100644
index 0000000..ba48cbc
--- /dev/null
+++ b/Lib/site-packages/google_api_core-2.17.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.3)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/Lib/site-packages/google_api_core-2.17.0.dist-info/top_level.txt b/Lib/site-packages/google_api_core-2.17.0.dist-info/top_level.txt
new file mode 100644
index 0000000..cb42911
--- /dev/null
+++ b/Lib/site-packages/google_api_core-2.17.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+google
diff --git a/Lib/site-packages/google_api_python_client-2.117.0.dist-info/INSTALLER b/Lib/site-packages/google_api_python_client-2.117.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/google_api_python_client-2.117.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/google_api_python_client-2.117.0.dist-info/LICENSE b/Lib/site-packages/google_api_python_client-2.117.0.dist-info/LICENSE
new file mode 100644
index 0000000..261eeb9
--- /dev/null
+++ b/Lib/site-packages/google_api_python_client-2.117.0.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/Lib/site-packages/google_api_python_client-2.117.0.dist-info/METADATA b/Lib/site-packages/google_api_python_client-2.117.0.dist-info/METADATA
new file mode 100644
index 0000000..da3f49f
--- /dev/null
+++ b/Lib/site-packages/google_api_python_client-2.117.0.dist-info/METADATA
@@ -0,0 +1,150 @@
+Metadata-Version: 2.1
+Name: google-api-python-client
+Version: 2.117.0
+Summary: Google API Client Library for Python
+Home-page: https://github.com/googleapis/google-api-python-client/
+Author: Google LLC
+Author-email: googleapis-packages@google.com
+License: Apache 2.0
+Keywords: google api client
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: httplib2 <1.dev0,>=0.15.0
+Requires-Dist: google-auth <3.0.0.dev0,>=1.19.0
+Requires-Dist: google-auth-httplib2 >=0.1.0
+Requires-Dist: google-api-core !=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0,<3.0.0.dev0,>=1.31.5
+Requires-Dist: uritemplate <5,>=3.0.1
+
+# Google API Client
+
+[![PyPI version](https://badge.fury.io/py/google-api-python-client.svg)](https://badge.fury.io/py/google-api-python-client)
+
+This is the [Google API Python client library](https://cloud.google.com/apis/docs/client-libraries-explained#google_api_client_libraries)
+for Google's discovery based APIs. To get started, please see the
+[docs folder](https://github.com/googleapis/google-api-python-client/blob/main/docs/README.md).
+
+This library is considered complete and is in maintenance mode. This means
+that we will address critical bugs and security issues but will not add any
+new features.
+
+This library is officially supported by Google. However, the maintainers of
+this repository recommend using [Cloud Client Libraries for Python](https://github.com/googleapis/google-cloud-python),
+where possible, for new code development. For more information, please visit
+[Client Libraries Explained](https://cloud.google.com/apis/docs/client-libraries-explained).
+
+## Version 2.0 Release
+The 2.0 release of `google-api-python-client` includes a substantial reliability
+improvement over 1.x, as discovery documents are now cached in the library
+rather than fetched dynamically. Upgrading from v1.x to v2.x is highly recommended.
+
+Only Python 3.7 and newer are supported. If you are not able to upgrade Python,
+please continue to use version 1.x, as we will continue supporting Python 2.7+ in
+[v1](https://github.com/googleapis/google-api-python-client/tree/v1).
+
+Discovery documents are no longer retrieved dynamically when you call
+`discovery.build()`; instead, they are bundled with the client library itself.
+New versions of this library are released weekly so that the bundled discovery
+documents stay up to date. As a result of caching the discovery documents, this
+package is at least 50 MB larger than the previous version.
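+
+For example, here is a minimal sketch of building a client under v2.x (the
+Drive API and Application Default Credentials are assumptions for illustration,
+not requirements of the library):
+
+```python
+import google.auth
+from googleapiclient.discovery import build
+
+# Resolve credentials via Application Default Credentials.
+credentials, _ = google.auth.default()
+
+# static_discovery=True makes build() read the discovery document bundled
+# with the package instead of fetching it over the network.
+service = build("drive", "v3", credentials=credentials, static_discovery=True)
+
+# List a few files to confirm the client works.
+response = service.files().list(pageSize=10).execute()
+print(response.get("files", []))
+```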
+
+Please see the [Migration Guide](https://github.com/googleapis/google-api-python-client/blob/main/UPGRADING.md)
+for more information.
+
+## Documentation
+
+See the [docs folder](https://github.com/googleapis/google-api-python-client/blob/main/docs/README.md) for more detailed instructions and additional documentation.
+
+## Other Google API libraries
+
+The maintainers of this repository recommend using
+[Cloud Client Libraries for Python](https://github.com/googleapis/google-cloud-python),
+where possible, when writing new code, for the following reasons:
+
+With [Cloud Client Libraries for Python](https://github.com/googleapis/google-cloud-python):
+- There is a separate client library for each API, so you can choose
+which client libraries to download, whereas `google-api-python-client` is a
+single client library for all APIs. As a result, the total package size for
+`google-api-python-client` exceeds 50 MB.
+- There are stricter controls for breaking changes to the underlying APIs,
+as each client library is focused on a specific API.
+- There are more features in these Cloud Client Libraries, as each library is
+focused on a specific API, and in some cases the libraries are owned by a team
+that specializes in that API.
+- Developers will benefit from IntelliSense.
+
+For more information, please visit
+[Client Libraries Explained](https://cloud.google.com/apis/docs/client-libraries-explained).
+
+Although there are many benefits to moving to
+[Cloud Client Libraries for Python](https://github.com/googleapis/google-cloud-python),
+the maintainers want to emphasize that `google-api-python-client` will continue
+to be supported.
+
+For Google Ads API, we recommend using [Google Ads API Client Library for Python](https://github.com/googleads/google-ads-python/).
+
+For Google Firebase Admin API, we recommend using [Firebase Admin Python SDK](https://github.com/firebase/firebase-admin-python).
+
+## Installation
+
+Install this library in a [virtualenv](https://virtualenv.pypa.io/en/latest/) using pip. virtualenv is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and, indirectly, permissions.
+
+With virtualenv, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+### Mac/Linux
+
+```
+pip3 install virtualenv
+virtualenv <your-env>
+source <your-env>/bin/activate
+<your-env>/bin/pip install google-api-python-client
+```
+
+### Windows
+
+```
+pip install virtualenv
+virtualenv <your-env>
+<your-env>\Scripts\activate
+<your-env>\Scripts\pip.exe install google-api-python-client
+```
+
+## Supported Python Versions
+
+Python 3.7, 3.8, 3.9, 3.10, 3.11, and 3.12 are fully supported and tested. This library may work on later versions of Python 3, but we do not currently run tests against those versions.
+
+## Unsupported Python Versions
+
+Python < 3.7
+
+## Third Party Libraries and Dependencies
+
+The following libraries will be installed when you install the client library:
+* [httplib2](https://github.com/httplib2/httplib2)
+* [uritemplate](https://github.com/sigmavirus24/uritemplate)
+
+For development you will also need the following libraries:
+* [WebTest](https://pypi.org/project/WebTest/)
+* [pyopenssl](https://pypi.python.org/pypi/pyOpenSSL)
+
+## Contributing
+
+Please see our [Contribution Guide](https://github.com/googleapis/google-api-python-client/blob/main/CONTRIBUTING.rst).
+In particular, we love pull requests, but please make sure to sign
+the contributor license agreement.
diff --git a/Lib/site-packages/google_api_python_client-2.117.0.dist-info/RECORD b/Lib/site-packages/google_api_python_client-2.117.0.dist-info/RECORD
new file mode 100644
index 0000000..7d3f521
--- /dev/null
+++ b/Lib/site-packages/google_api_python_client-2.117.0.dist-info/RECORD
@@ -0,0 +1,531 @@
+apiclient/__init__.py,sha256=U1SOZ1GeeF3uCr0fzO5Od-rpe17R12-Ppyq25NTSbow,746
+apiclient/__pycache__/__init__.cpython-312.pyc,,
+google_api_python_client-2.117.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_api_python_client-2.117.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+google_api_python_client-2.117.0.dist-info/METADATA,sha256=uCq6OnRXEhtfF8KU5iD0D3WsmtrAmwKjW-gwpVIeTMc,6623
+google_api_python_client-2.117.0.dist-info/RECORD,,
+google_api_python_client-2.117.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google_api_python_client-2.117.0.dist-info/WHEEL,sha256=P2T-6epvtXQ2cBOE_U1K4_noqlJFN3tj15djMgEu4NM,110
+google_api_python_client-2.117.0.dist-info/top_level.txt,sha256=lbWgdDwQ3oww0ub0pmZ3fmIYAOTHf18Two4U9CxRlnw,58
+googleapiclient/__init__.py,sha256=kFWxKShJalbnrCuV0klL7mjZaiXfPpjlqmyKyI8yfTU,904
+googleapiclient/__pycache__/__init__.cpython-312.pyc,,
+googleapiclient/__pycache__/_auth.cpython-312.pyc,,
+googleapiclient/__pycache__/_helpers.cpython-312.pyc,,
+googleapiclient/__pycache__/channel.cpython-312.pyc,,
+googleapiclient/__pycache__/discovery.cpython-312.pyc,,
+googleapiclient/__pycache__/errors.cpython-312.pyc,,
+googleapiclient/__pycache__/http.cpython-312.pyc,,
+googleapiclient/__pycache__/mimeparse.cpython-312.pyc,,
+googleapiclient/__pycache__/model.cpython-312.pyc,,
+googleapiclient/__pycache__/sample_tools.cpython-312.pyc,,
+googleapiclient/__pycache__/schema.cpython-312.pyc,,
+googleapiclient/__pycache__/version.cpython-312.pyc,,
+googleapiclient/_auth.py,sha256=QttUwhmp7BmhW5CReaAOVGH6kEtITYmEHhVCVmsnDPs,5736
+googleapiclient/_helpers.py,sha256=iNxILG6iNNFukr7lhaYfVqX4oFM5SERvcsU3JxeOo6I,6723
+googleapiclient/channel.py,sha256=Fc4nxu-RxGkjY_STp9bwJfZaLan6VdX-DNniH-ANSuE,11054
+googleapiclient/discovery.py,sha256=svHPpLLS9GNHa9xGt2zmhxqDLu8krhrNPRS1aabATrE,63247
+googleapiclient/discovery_cache/__init__.py,sha256=ww_vl0vhVLuHSEdRTv3-gq6EDG--Ff7rILYHHFifnzc,2315
+googleapiclient/discovery_cache/__pycache__/__init__.cpython-312.pyc,,
+googleapiclient/discovery_cache/__pycache__/appengine_memcache.cpython-312.pyc,,
+googleapiclient/discovery_cache/__pycache__/base.cpython-312.pyc,,
+googleapiclient/discovery_cache/__pycache__/file_cache.cpython-312.pyc,,
+googleapiclient/discovery_cache/appengine_memcache.py,sha256=6T1pQj-toAhDwfgLuiggFGhxKNGw5y-NnLUzLIF_M4s,1657
+googleapiclient/discovery_cache/base.py,sha256=yCDPtxnbNN-p5_9fzBacC6P3wcUPlaCQIy5v_dXTons,1389
+googleapiclient/discovery_cache/documents/abusiveexperiencereport.v1.json,sha256=URgKewifjcY5ZG9IqQghXYuUsLoRg3s3egFj6FpcgAY,5671
+googleapiclient/discovery_cache/documents/acceleratedmobilepageurl.v1.json,sha256=99eu0-KK40rpZzGwARYpnSf4X9kG3SznyQujWy2bNeA,6729
+googleapiclient/discovery_cache/documents/accessapproval.v1.json,sha256=H_ox9PT6amf4GwbP30-PLSqvZVS5_EP9hBF86QSAJ4M,48620
+googleapiclient/discovery_cache/documents/accesscontextmanager.v1.json,sha256=xASnQuVrjOJ6E9ftyUwzRzcJU4RFYXIpt_LBxEi_PjA,108252
+googleapiclient/discovery_cache/documents/accesscontextmanager.v1beta.json,sha256=Vsv_nz7ubkkbGwNgxirN2AWgUuYH0mADUx9LFaqKu_c,51062
+googleapiclient/discovery_cache/documents/acmedns.v1.json,sha256=EBjn_jpXlcUIsteONt8F7wWkBz9bGNNM-kFjsyr7BZk,6858
+googleapiclient/discovery_cache/documents/adexchangebuyer.v1.2.json,sha256=0-XVzUa3kcqFiCZYk_W0au5mIV5-w3AgBtQJsSnWL9w,20475
+googleapiclient/discovery_cache/documents/adexchangebuyer.v1.3.json,sha256=6lFs8ULCDsQuo8s48oBpFdDCVbwICk6MNaCvDVrhP9Q,59967
+googleapiclient/discovery_cache/documents/adexchangebuyer.v1.4.json,sha256=pgYvRj3gK2vBxH7Dh4YoCqd7TXTzTDPQqVqOlVRxITk,136987
+googleapiclient/discovery_cache/documents/adexchangebuyer2.v2beta1.json,sha256=QnMIHAmKovMHiTmLK7vDS0Jiy4hWlvrhC33KwNlUfmU,231555
+googleapiclient/discovery_cache/documents/adexperiencereport.v1.json,sha256=moMMTVilROMJYXaS1zxN9rLK9OULf2sYxm-CCCEVOFI,6644
+googleapiclient/discovery_cache/documents/admin.datatransfer_v1.json,sha256=QJtIacDAKGSHgTNgjDJI0wnwYpbyDDuYe5x7mshHeZw,12021
+googleapiclient/discovery_cache/documents/admin.datatransferv1.json,sha256=QJtIacDAKGSHgTNgjDJI0wnwYpbyDDuYe5x7mshHeZw,12021
+googleapiclient/discovery_cache/documents/admin.directory_v1.json,sha256=X4Bj6eIBqItbqzC83_u4b4vez7GwrQpDeJzbg_pHfnY,267036
+googleapiclient/discovery_cache/documents/admin.directoryv1.json,sha256=X4Bj6eIBqItbqzC83_u4b4vez7GwrQpDeJzbg_pHfnY,267036
+googleapiclient/discovery_cache/documents/admin.reports_v1.json,sha256=n3hV3ykfixJ9zW24ErOYO17h9i61gZoM-VIxvln1IgY,52276
+googleapiclient/discovery_cache/documents/admin.reportsv1.json,sha256=n3hV3ykfixJ9zW24ErOYO17h9i61gZoM-VIxvln1IgY,52276
+googleapiclient/discovery_cache/documents/admob.v1.json,sha256=dY2_PlygEmcyVjZLK7WeCU04J4kknBQCV0nQa8B6tqY,52164
+googleapiclient/discovery_cache/documents/admob.v1beta.json,sha256=mGP3DYdhfrRVkHW7hADDWwtTK45xcNmWcd6b23PARac,91957
+googleapiclient/discovery_cache/documents/adsense.v2.json,sha256=8CLNcmoFm4Ojuni521gp2YW3PP1hk0xvJOjVWOiRaKo,94853
+googleapiclient/discovery_cache/documents/adsensehost.v4.1.json,sha256=kKGnNG3aFbYwLFOLJkIGph3m7vC2heaob68rlmV4J28,37965
+googleapiclient/discovery_cache/documents/advisorynotifications.v1.json,sha256=759DT397nGQUCFt5BjKA82KXMZc3ba_WrkFyvVe55vo,18927
+googleapiclient/discovery_cache/documents/aiplatform.v1.json,sha256=V14Z_K4wk4gWdu4P_sSylK3xDX8Jt9eKJkfR1_adYe4,1494187
+googleapiclient/discovery_cache/documents/aiplatform.v1beta1.json,sha256=Ra8QIyOnwnfeGz8AgUVgLn4qvhBGMurPvqziTE2YCco,1618560
+googleapiclient/discovery_cache/documents/alertcenter.v1beta1.json,sha256=Wx7ZL1caCKJnX3YLdEGc29xNdHn8UjlsbKB36O9scEw,64337
+googleapiclient/discovery_cache/documents/alloydb.v1.json,sha256=Lx8c8ktUDTwp0xYg5hZflOz2wb3ExZ0tPx1SKM0VVdk,145330
+googleapiclient/discovery_cache/documents/alloydb.v1alpha.json,sha256=TYlFQaHviQZnP__0rrImFi-KUwVN7YyhYm81Lqxm45k,152092
+googleapiclient/discovery_cache/documents/alloydb.v1beta.json,sha256=RuGnCzLaNbcwUaWOfrT-X-xbkOb0_L7W8FP86iPuEic,148114
+googleapiclient/discovery_cache/documents/analytics.v3.json,sha256=pq5nLmdNvFR-k8Sre800U3mI2tU6hVZGdQCNX_VJNiE,195428
+googleapiclient/discovery_cache/documents/analyticsadmin.v1alpha.json,sha256=nvkERO-rH--9w1bmPVPjCHorpefaXPMMHCOfIaO1ej0,300642
+googleapiclient/discovery_cache/documents/analyticsadmin.v1beta.json,sha256=JX2NbhOiIYhLHwVeUVQO2Q4zYz5Q3DUWKIgpaTCye3M,113984
+googleapiclient/discovery_cache/documents/analyticsdata.v1alpha.json,sha256=3rdVFqqDjQQ7-0qi_UkAYpRthvRKjoXJ8SJzulNHBn0,80445
+googleapiclient/discovery_cache/documents/analyticsdata.v1beta.json,sha256=cQKz1o4xZW9lwssbN5hM6AIxPQ58tNorXFHDlisDRDg,94704
+googleapiclient/discovery_cache/documents/analyticshub.v1.json,sha256=Snx_l8zM2dMcYP_EVHE5zAqyt2CmQKp-kJSNtM4Y6Ow,70144
+googleapiclient/discovery_cache/documents/analyticshub.v1beta1.json,sha256=szcnILUyqVtfhE3lAWMSLcYCSCmVEogckeMChg5jHxw,54367
+googleapiclient/discovery_cache/documents/analyticsreporting.v4.json,sha256=8S4FyrAstnlesJ3ss4fYM7V18YvJgMh9S9jIo-vAOY8,61333
+googleapiclient/discovery_cache/documents/androiddeviceprovisioning.v1.json,sha256=wcftJrA80Mm5DaQFUM1x07ZEaNs2V_ObHIM_N8IDldI,60784
+googleapiclient/discovery_cache/documents/androidenterprise.v1.json,sha256=il72tGm2Z1g39l6x2lfLq2BoKIP1rc4hJCiRtJh9XgE,155978
+googleapiclient/discovery_cache/documents/androidmanagement.v1.json,sha256=p7PmFr2vk7CxlMaDbYHinZzFiHfm_gxIMSFZ-mORWHw,245467
+googleapiclient/discovery_cache/documents/androidpublisher.v3.json,sha256=CYZZKbRcZgzkLkIjP1G_wVAjiKFa8UkrC-hABXzEoTs,301820
+googleapiclient/discovery_cache/documents/apigateway.v1.json,sha256=ABxMXAvt5ZnIy_KT1XzWFdqBbTNC-ssPfWRsaEX1GPo,67378
+googleapiclient/discovery_cache/documents/apigateway.v1beta.json,sha256=grf2VDSYAk95xNcht23cfnm3dmRo9WTj2tuEtpYhi7c,69091
+googleapiclient/discovery_cache/documents/apigee.v1.json,sha256=fxXLGyXMFKtXnNzM3pmnYkgEGChOiAiNJ62nQY3KwT4,617770
+googleapiclient/discovery_cache/documents/apigeeregistry.v1.json,sha256=hCu1irwJNFHGEQgiTM5HJtqvF1JUeqpBVA8SGUn9syQ,156027
+googleapiclient/discovery_cache/documents/apikeys.v2.json,sha256=_e6Hn1UnDOmVBg36V86fC6t7ErdYq9q22XrBgiWFChk,22139
+googleapiclient/discovery_cache/documents/appengine.v1.json,sha256=pqFjIxynKVJYm8sc6-Ik3hyrV8Y0f4vps1oidquEhtA,145504
+googleapiclient/discovery_cache/documents/appengine.v1alpha.json,sha256=XQ1nsL6jPO2KaS4Tq8IbOnrXzOvcM4Y-qTMSepNMKog,57747
+googleapiclient/discovery_cache/documents/appengine.v1beta.json,sha256=HEApu9yp2nEySARxUhm04HsQ3WDocyTL4A5bNwbJL-k,152894
+googleapiclient/discovery_cache/documents/appengine.v1beta4.json,sha256=yELseCdExWSunuF7afIqEw_94UU7PvKCXDmWkyAbYWg,110912
+googleapiclient/discovery_cache/documents/appengine.v1beta5.json,sha256=OHCPs6izs9jPAEuG8s9dr76KCn3f5lKdfWE5NaQBhZo,110227
+googleapiclient/discovery_cache/documents/apphub.v1.json,sha256=E_GUE04FL-abZKVEtGCxmC5-aEAUnoquk5opTWy0nLo,34997
+googleapiclient/discovery_cache/documents/apphub.v1alpha.json,sha256=yPExiF39zuxLy1pADjBbYdSk3q_ycEdZG7PoTFwZ0cw,93010
+googleapiclient/discovery_cache/documents/area120tables.v1alpha1.json,sha256=FKo2rOr0rGzWvRgDGJg3yJpEi-27hYW_s7MWlOv6tyw,26671
+googleapiclient/discovery_cache/documents/artifactregistry.v1.json,sha256=ESN52J33WKsBbZzEu9mpxkjLE4mUzW3WD4jzysbX28k,111184
+googleapiclient/discovery_cache/documents/artifactregistry.v1beta1.json,sha256=DCImA0WPROLl4LeHC4vG2thGC1YwMZ3ptGOngb0cMKQ,53976
+googleapiclient/discovery_cache/documents/artifactregistry.v1beta2.json,sha256=i1zwAbmPYjRSx8_QE1K4YsgCyxO75tyOBMOhax1dPak,69991
+googleapiclient/discovery_cache/documents/assuredworkloads.v1.json,sha256=2gxxNIRLOZ-gAwrQAgqOI6vJKy_wk5jpwxi0IVz5KSs,57658
+googleapiclient/discovery_cache/documents/assuredworkloads.v1beta1.json,sha256=9MoVrd3Abm3XxBOftiJGw95Rl-gy-4KA1hXt4X7LSvY,59204
+googleapiclient/discovery_cache/documents/authorizedbuyersmarketplace.v1.json,sha256=3MJLUGw_VIuLxVvsxfArbfpVAWqqR4xgVhIHvJzgbjA,104158
+googleapiclient/discovery_cache/documents/backupdr.v1.json,sha256=KjOY-I7P4knpGbAbr9ZKmxx13RzaKBSHpqVrkMIy1ic,49111
+googleapiclient/discovery_cache/documents/baremetalsolution.v1.json,sha256=DgpA2txkVyJ3FJmckMnRzB1LGtD8K6qo8F2eYT6Bi3c,13574
+googleapiclient/discovery_cache/documents/baremetalsolution.v1alpha1.json,sha256=LWBZqVLJQlt0qgc4yoRJWQfwH0oPL8aS4wHGF8LRfzk,19186
+googleapiclient/discovery_cache/documents/baremetalsolution.v2.json,sha256=AJ43Np_auFrFXNwndcNxs8hHK09b_6OtA1ntKggtdR4,101566
+googleapiclient/discovery_cache/documents/batch.v1.json,sha256=5n1t1zBGZf_LUKzG6hbjAHdY9nhmCz5LjSDzTzesA2c,80606
+googleapiclient/discovery_cache/documents/beyondcorp.v1.json,sha256=EvBUd_LQAxfzCefGlRTRwYRnYhvWMWKcnCwkq1Cxlrc,136146
+googleapiclient/discovery_cache/documents/beyondcorp.v1alpha.json,sha256=d7AoiYJCzJjFdG5TIYYg6eiU2XbjgEm8-8ryFyut8mQ,255083
+googleapiclient/discovery_cache/documents/biglake.v1.json,sha256=eLfxvy9ornkj4gRkOn9lDp-mv80rUmwzfQ1IKrRrdmA,26554
+googleapiclient/discovery_cache/documents/bigquery.v2.json,sha256=LbX0fBT1qbqLPJ4IBUzYOvQP2kAvkt2Uduhbj92WKfQ,357180
+googleapiclient/discovery_cache/documents/bigqueryconnection.v1.json,sha256=Lryx_bfhXgo2ceHSmhBJle0b5ws94gQd0zgvx3iYYAI,41794
+googleapiclient/discovery_cache/documents/bigqueryconnection.v1beta1.json,sha256=2TBTau2wji4-6Xd5lWpfA1jwYX5zRGI7kjVHgD7nABw,32721
+googleapiclient/discovery_cache/documents/bigquerydatapolicy.v1.json,sha256=r5uITP8yXQTO8u_9NsolegCPbreYlFwLpQT2D--kOzU,34611
+googleapiclient/discovery_cache/documents/bigquerydatatransfer.v1.json,sha256=jWp3CG1X57yx9w2hkbhOCdsvnbp1Ok8xUxNEeocDO2c,79117
+googleapiclient/discovery_cache/documents/bigqueryreservation.v1.json,sha256=HsPCfclfAPsJm0mlqsBtDCWRTXvvmsfwioSVcEOy5h4,50350
+googleapiclient/discovery_cache/documents/bigqueryreservation.v1alpha2.json,sha256=u1WvY6zx7CYg_q9OhtqB5I8Su4b1kEA7p3VwegecO8U,44509
+googleapiclient/discovery_cache/documents/bigqueryreservation.v1beta1.json,sha256=zVtHmOXJTgtYX3JK9YoFUW8uNNHwi-ZPHs4gQN-Y0cY,60957
+googleapiclient/discovery_cache/documents/bigtableadmin.v1.json,sha256=udwAbAWXjShMSJcvAaxRNI-V5QFFQdE6InVOhHT-y7Q,32684
+googleapiclient/discovery_cache/documents/bigtableadmin.v2.json,sha256=VmJqOXOZagkIRyMKt9iMF9i56j6bP-XvxN7FTSX4wzU,143797
+googleapiclient/discovery_cache/documents/billingbudgets.v1.json,sha256=lx84v6OCypCvZamw9g_ebTLdGLAvDKMiccsqiQ89wCc,27319
+googleapiclient/discovery_cache/documents/billingbudgets.v1beta1.json,sha256=XSm4mRENNWmCRgn9Uyge53yHn65WtZzWBTdpQ2dCKW0,26850
+googleapiclient/discovery_cache/documents/binaryauthorization.v1.json,sha256=zdYF4qS0Xo-Rn8l8u9raZ0-lcNGtXO3T7_IayySnHzg,75166
+googleapiclient/discovery_cache/documents/binaryauthorization.v1beta1.json,sha256=weG6zrlx-vShtDnw9QQ6_hCkAvxX0lYXzHjvFn8tlWg,50577
+googleapiclient/discovery_cache/documents/blockchainnodeengine.v1.json,sha256=mBl87cBUCwV1NALcjUoRXvUHrq_8VSOMcY1MqxTYl9I,32457
+googleapiclient/discovery_cache/documents/blogger.v2.json,sha256=FLFyto5Y0RYXfzVblkE43nWk7fb3OAt2TQDqPONJwKA,21584
+googleapiclient/discovery_cache/documents/blogger.v3.json,sha256=YNlKmKUUegLjjTfDYXe_BwrAJR4We9kXK7UYVejIBzY,44123
+googleapiclient/discovery_cache/documents/books.v1.json,sha256=XbqGaafPpFlUejLo-paOEr5c9jNgX0ORxkcAj2maDmA,109930
+googleapiclient/discovery_cache/documents/businessprofileperformance.v1.json,sha256=J6siJfbCUkjqu4nY7z57P-2i1aBd2XZrii0J7NHU6dw,24089
+googleapiclient/discovery_cache/documents/calendar.v3.json,sha256=XMHsh2ZRY8J0_lDovmKwOhERCCeazu1mAkCcJmaetIk,115255
+googleapiclient/discovery_cache/documents/certificatemanager.v1.json,sha256=AnD9M0Uj9I8uWbV9pzPdWragZ8kVMq7qNJDZO-ClHVw,69475
+googleapiclient/discovery_cache/documents/chat.v1.json,sha256=QrQizgLK7MWVNi1VZyMHrFZQNwd2TWUb0Z6g7m4uZgM,183723
+googleapiclient/discovery_cache/documents/checks.v1alpha.json,sha256=4Pi1KRccMBsfSU5Px-MSYURUv_7PNHQuoK38BD2r7t8,65569
+googleapiclient/discovery_cache/documents/chromemanagement.v1.json,sha256=kLpM83DwlX6XsqSBvulY13FIOWyjI37F1GZdvxvOZvI,132325
+googleapiclient/discovery_cache/documents/chromepolicy.v1.json,sha256=RlRXuDm0b62pfi6RFVscsH3Z1JycVRe-QpZMo_aFy7w,61896
+googleapiclient/discovery_cache/documents/chromeuxreport.v1.json,sha256=ZchY191rNCX0YajM-F1tiv-MYrgV5gV850LhqTIZW9o,22424
+googleapiclient/discovery_cache/documents/civicinfo.v2.json,sha256=pTuIe-4Whtu_FwG9cHT-Pu_-12nw1njXKNTyVuvosyA,37651
+googleapiclient/discovery_cache/documents/classroom.v1.json,sha256=YVeVMkVvuWWUWrSUBALlfQJbSSSewr2SfSiUngtyQkc,160146
+googleapiclient/discovery_cache/documents/cloudasset.v1.json,sha256=O-YT84dxl5t4LEI4CqI5IGvaSa5yrAnJ3BjeGW5-gxw,255497
+googleapiclient/discovery_cache/documents/cloudasset.v1beta1.json,sha256=gJ7U4rzpZf7N5_t4nnD3NnGW6km_ihniNoCal-4OryI,93267
+googleapiclient/discovery_cache/documents/cloudasset.v1p1beta1.json,sha256=dVS_nm2WMrfcDkb8MIE8UTPPeudW0bsVVIgIoHA6Ugk,81424
+googleapiclient/discovery_cache/documents/cloudasset.v1p4beta1.json,sha256=RT5pGxMQhQ_T9JKt97G6G94znnnweuqgSwEnby7Vkck,110574
+googleapiclient/discovery_cache/documents/cloudasset.v1p5beta1.json,sha256=54sr_F77zkl83URwkqKrJgq8z_BF1m12LbkMr2guxwE,80092
+googleapiclient/discovery_cache/documents/cloudasset.v1p7beta1.json,sha256=IZM4-DPW2-tijO5lCXQnnH5Hl_wHc5rQs-zEXdaPcZU,85747
+googleapiclient/discovery_cache/documents/cloudbilling.v1.json,sha256=kxIjT94doZoE-4_X9BvS8KPvR8ipu-WQj4-hZINsWnc,60689
+googleapiclient/discovery_cache/documents/cloudbilling.v1beta.json,sha256=zNr_OIhAHw7mcaSAiC1HvqJGqQF0cNICMGmEQZDMXOY,97458
+googleapiclient/discovery_cache/documents/cloudbuild.v1.json,sha256=7PmPADPZizuSNN-riPV8BG4a34ZKN4gsvVleMYSCtWY,179373
+googleapiclient/discovery_cache/documents/cloudbuild.v1alpha1.json,sha256=tN8lxatpJZdx4mhuKWjgzYeBfbN3ZFgZOhdDI8HdMTc,103536
+googleapiclient/discovery_cache/documents/cloudbuild.v1alpha2.json,sha256=FqGePJRUozQZ3MgYXXDeeBwhKb38oRdL3RDyfn5LKOA,102139
+googleapiclient/discovery_cache/documents/cloudbuild.v1beta1.json,sha256=Z9D0XmsAbFI1Ug2BBm8i4hocf4dzGW0MP_3rprbHH2I,105268
+googleapiclient/discovery_cache/documents/cloudbuild.v2.json,sha256=oDBC_R5KAz7nTLFSAYIF5NmNKy9ktwzkIcdcHC_ieF4,99170
+googleapiclient/discovery_cache/documents/cloudchannel.v1.json,sha256=wHfbvpnnNYBk_qVKdtgGuBCam-tC3DOR8vK1ND9Au5I,239183
+googleapiclient/discovery_cache/documents/cloudcommerceprocurement.v1.json,sha256=NtXgLDh60lxmnNf3YKeRYfUr2suT9B1KvXUyXvfeXuU,40056
+googleapiclient/discovery_cache/documents/clouddebugger.v2.json,sha256=2IO6Cc_uKUbDzdXhyc02Xr2Y7vd8TIRq_2iWDhwGYPE,53446
+googleapiclient/discovery_cache/documents/clouddeploy.v1.json,sha256=HjicjIt4URM4PAYggD40AvhJeokQBP66YLJMKGbJOoA,218021
+googleapiclient/discovery_cache/documents/clouderrorreporting.v1beta1.json,sha256=iRjIOrCLpsipfocbyclCqY2bGv_cwXxAU2jLHBUi6No,31459
+googleapiclient/discovery_cache/documents/cloudfunctions.v1.json,sha256=QT62jWo7jC0v59S_xVJBhmSh7P1PV6QQav7Yo69MD9Q,78839
+googleapiclient/discovery_cache/documents/cloudfunctions.v2.json,sha256=vHOiSoLZ9LgN5nyNb8m9OhGfpXnzyV7unYM_excBuKQ,85649
+googleapiclient/discovery_cache/documents/cloudfunctions.v2alpha.json,sha256=8K5DfPfrQ-XFV0HndFFW_q4-1RP4Pw2P1Cn9U0NFuZw,85804
+googleapiclient/discovery_cache/documents/cloudfunctions.v2beta.json,sha256=_3g6DJVgQSBRU87Sk3ejTcT7IHH55T4mYPsSwaMurR8,85773
+googleapiclient/discovery_cache/documents/cloudidentity.v1.json,sha256=P8fFKnb7QpWfPL-h-TOeNd5S6Z42YcmgCe82Gtw3O0g,145638
+googleapiclient/discovery_cache/documents/cloudidentity.v1beta1.json,sha256=sNYFgwqhjzNjbORq46VFpqfuwpolKlmaDdATOhOoaWk,173445
+googleapiclient/discovery_cache/documents/cloudiot.v1.json,sha256=BLM9X9po2P0rn8osda4MgRc990yAYWFVbRMnoCTjN6w,94703
+googleapiclient/discovery_cache/documents/cloudkms.v1.json,sha256=ljPOFAZpAQNbbvslnEdGcKLkN_CWHpXgVubrIYGaNE8,176230
+googleapiclient/discovery_cache/documents/cloudprofiler.v2.json,sha256=qf2WnzttjLojkDm2PFqF544L2A79nVRLppoa4hAY4eU,14689
+googleapiclient/discovery_cache/documents/cloudresourcemanager.v1.json,sha256=vnGznuRgb6DVC9yveltRcGAyHJnRefeRb5N-KzQ1hhk,101009
+googleapiclient/discovery_cache/documents/cloudresourcemanager.v1beta1.json,sha256=edbZVkJD-gctOnx7TpY7jF-O7cVUGCcE-kYepWYvrzA,58393
+googleapiclient/discovery_cache/documents/cloudresourcemanager.v2.json,sha256=CS9RCjC5xFCCgQ4uOxSrRy22K1wRVS0ROxsEOLJrazA,51648
+googleapiclient/discovery_cache/documents/cloudresourcemanager.v2beta1.json,sha256=1j-C5PH3q9mxcv-D0l3Y_wx5HyM8gB8PHtGf2xt3WjI,51658
+googleapiclient/discovery_cache/documents/cloudresourcemanager.v3.json,sha256=JxIiWUzNaYWNoicPeACVFMDG0_nv3MQ3OZELw98vnwg,125470
+googleapiclient/discovery_cache/documents/cloudscheduler.v1.json,sha256=uNbH16WPLv5EqRH8qEasyrCSagtrv4Kx1byL7Gtzmh4,39868
+googleapiclient/discovery_cache/documents/cloudscheduler.v1beta1.json,sha256=r95yiE_rQSL7La-Yl0g5Qoqmm27S_wkXlFZSl6Fh9lA,41606
+googleapiclient/discovery_cache/documents/cloudsearch.v1.json,sha256=csMOy3Y2Wlnb0uyxV896PuL1pKnVCE7tmkZa614ipY8,246061
+googleapiclient/discovery_cache/documents/cloudshell.v1.json,sha256=x5En6iVciN5ACqytvWbpc70fGsAsAmpDEvnKKiylRT0,22893
+googleapiclient/discovery_cache/documents/cloudshell.v1alpha1.json,sha256=XoIskUrEjFBy7Saxe7cApOT6ShmcKZlwzRDAiIoxgSI,25615
+googleapiclient/discovery_cache/documents/cloudsupport.v2.json,sha256=LquWmiRRWtS_dDLUvtnOUVEDQW-bEW1cUubcd2APj_I,52689
+googleapiclient/discovery_cache/documents/cloudsupport.v2beta.json,sha256=q5lY7tNqaVToVlTzow1UiOupvu7ZqSe-TmjmP-WUS7o,54775
+googleapiclient/discovery_cache/documents/cloudtasks.v2.json,sha256=q6h1i1GxJ3GQrsqvD7tVKJmM-hWEsOen3HpSm_D6sxc,95334
+googleapiclient/discovery_cache/documents/cloudtasks.v2beta2.json,sha256=aWHlGZU3pyCkhv3e6a6RqHfa_5w_ChWyuI8l8LL7oIQ,112220
+googleapiclient/discovery_cache/documents/cloudtasks.v2beta3.json,sha256=iHPYqOFiL2kMatxSI-MvF1HLscDv9BMMThqYm9Rzt00,102138
+googleapiclient/discovery_cache/documents/cloudtrace.v1.json,sha256=bEy74CvGN7ERzvGWDjJU90-GTPt-8fipkEuLOf89vQg,14143
+googleapiclient/discovery_cache/documents/cloudtrace.v2.json,sha256=V4LLKiTLlQr930nWp1D1U4CnSPs8omKVAziQabA7M2M,21080
+googleapiclient/discovery_cache/documents/cloudtrace.v2beta1.json,sha256=kF9rsO-vfj6sGxY12zoOgi9FktIhvz6Zm9LOjE14yRY,11521
+googleapiclient/discovery_cache/documents/composer.v1.json,sha256=tnAT57rhYFdN9dfNgeQHSxrOt1NTgi6N2QDD0mPvgWc,81983
+googleapiclient/discovery_cache/documents/composer.v1beta1.json,sha256=y3oJWMgkvnPW_iCbqm_kIYnA8VLmWJWrzgC5xc7sAeA,100826
+googleapiclient/discovery_cache/documents/compute.alpha.json,sha256=JfpBuB4dRdcm5njKyAE2JSBB6VIoEZ7kewO7eUbHlXM,4226462
+googleapiclient/discovery_cache/documents/compute.beta.json,sha256=1JXhNjndMBIPO_-neHbsKl3aGNXhVDwgDpaEm2GVYRM,3832134
+googleapiclient/discovery_cache/documents/compute.v1.json,sha256=kJeEwkEYxK3FiiWxaXxL0u6EOIbCnp22UltNWq9VVPY,3483490
+googleapiclient/discovery_cache/documents/connectors.v1.json,sha256=icj-aIKVpoJVIJzdqpEIkQZstXK6jP1gLOAsBGo6NSE,202276
+googleapiclient/discovery_cache/documents/connectors.v2.json,sha256=bE-_WTvXaZ4OA58OxcCvBqEHaQDHBsp_dug58L7rTRI,68197
+googleapiclient/discovery_cache/documents/contactcenteraiplatform.v1alpha1.json,sha256=vsde9dbeXatEKwqH5LW0FCKH8bbRCcp4VGhup_SidtA,32032
+googleapiclient/discovery_cache/documents/contactcenterinsights.v1.json,sha256=LbpR3ZpBBFSJbT-k68fxxV_LdrFjco4nw-iOUIYAChk,194847
+googleapiclient/discovery_cache/documents/container.v1.json,sha256=_CBhTggsUgxtC_Fmq_X0mLzAEaS5N7nBa8iCaD2VMGE,282048
+googleapiclient/discovery_cache/documents/container.v1beta1.json,sha256=feiHV_rTjTqGlm6KGM05gfN4nVTAyCQuOgNI2yMYc4U,306860
+googleapiclient/discovery_cache/documents/containeranalysis.v1.json,sha256=ihp406OUpLQzPqZzwvOoTOke-QJ24izMEPXzasu-SOM,193597
+googleapiclient/discovery_cache/documents/containeranalysis.v1alpha1.json,sha256=2m2R2jn21DDlRzAu_f5Y0yZuE5hv5ixomdT2ATy99W4,238310
+googleapiclient/discovery_cache/documents/containeranalysis.v1beta1.json,sha256=jgNyjdFgVgl_ZrGachQ3R95SZzoFZYlAcHDsua76Hl8,209476
+googleapiclient/discovery_cache/documents/content.v2.1.json,sha256=ZGJLiKNtTWoiw-gh6GUEJyDhXvX2Gv-_PF-ZAgTgDmk,581996
+googleapiclient/discovery_cache/documents/content.v2.json,sha256=PuV1X8AOtI9T1gaQphOf14GgA06gplme2e31cRNVSlU,389993
+googleapiclient/discovery_cache/documents/contentwarehouse.v1.json,sha256=A1oNAhms-MeFLGQ-3w_B5WJZ3DuMFgA6c1Pht0wTU-A,4540847
+googleapiclient/discovery_cache/documents/customsearch.v1.json,sha256=KUpcjJ5VxmyHHxmIWw359VhEwhuU0o4ro701LziqQhI,62102
+googleapiclient/discovery_cache/documents/datacatalog.v1.json,sha256=it4dAPOY6v4tsA3Gj_GYaigwtWLcQWjeSCAXFOvZK_o,179703
+googleapiclient/discovery_cache/documents/datacatalog.v1beta1.json,sha256=X2vSUKCz6XBN9ZvGv74jmUKLA-oqo4eHo26HNRZIWSk,178242
+googleapiclient/discovery_cache/documents/dataflow.v1b3.json,sha256=kuSMXoABZhntv2-w7nyudkVlILM0yfkdsH6BmyCWq0A,262923
+googleapiclient/discovery_cache/documents/dataform.v1beta1.json,sha256=xbE8k7ueDDqWEfJ6Zsr99ACKzHO_z8xNBGaLnko5ubE,132239
+googleapiclient/discovery_cache/documents/datafusion.v1.json,sha256=h13v8xC2PruKHTQwN4tihDJ--eVS7fOWMPw1jpY3pCQ,65693
+googleapiclient/discovery_cache/documents/datafusion.v1beta1.json,sha256=JPsCgy1wp1LFjTydrkt5CcudEMgX36OCCu9oC3bm7WE,74698
+googleapiclient/discovery_cache/documents/datalabeling.v1beta1.json,sha256=9lINAzLJl-_ZnLMaN-XYe7wPNwMiQYnmuga6OjuVTvk,202099
+googleapiclient/discovery_cache/documents/datalineage.v1.json,sha256=fkwWtYtNLkVVEFfvTTxTayaU5SZXGRlfne-76Ex19w8,45445
+googleapiclient/discovery_cache/documents/datamigration.v1.json,sha256=GJ_lvn-PLDKB0UgyCZVn5Te5SyQc0eKA4nOxt9vz7NY,207056
+googleapiclient/discovery_cache/documents/datamigration.v1beta1.json,sha256=OFaCR5zXMpIi1ofE7tacpB0sr9JnBjYc4yk4R79fsKs,83752
+googleapiclient/discovery_cache/documents/datapipelines.v1.json,sha256=lOSu-FUVQG87aSsQh8PG7PUHRI02Lfbrjhcnxw9BZcc,38301
+googleapiclient/discovery_cache/documents/dataplex.v1.json,sha256=8iU8jX-TFD-uYYVcaJid_yFTslSq5UAtJsjLF-ITWG8,333254
+googleapiclient/discovery_cache/documents/dataportability.v1beta.json,sha256=rB9RYLy36wJPhAelXBFxxU6gYM2W1qDY7GMtk0zSPf8,19686
+googleapiclient/discovery_cache/documents/dataproc.v1.json,sha256=iNlfhAaPa6f747IX80sCzdomwYXCjBveIXXiX6-H5CE,287208
+googleapiclient/discovery_cache/documents/dataproc.v1beta2.json,sha256=jfuxbI0IJ3EFmdqwKxUp_DvxU6gLnDToTAicOhZM0qo,256974
+googleapiclient/discovery_cache/documents/datastore.v1.json,sha256=5-T7g8CQeSIx4cPxhTNEfdE75HQOeI1-k1HlVKLA6wE,89723
+googleapiclient/discovery_cache/documents/datastore.v1beta1.json,sha256=rgs9Z4544PvqiLOEDcNVcfkOt-wPywU2beGxmrvXNcU,28932
+googleapiclient/discovery_cache/documents/datastore.v1beta3.json,sha256=8iMtwv8hgoRRATWlXzGrG7sfAxByzUe_6tIhzDwz7dI,66717
+googleapiclient/discovery_cache/documents/datastream.v1.json,sha256=xcwp50aizs8KpDT3-TpXiCjQ14eDdR4nMhOq8EWd9O8,92271
+googleapiclient/discovery_cache/documents/datastream.v1alpha1.json,sha256=dpKrxt-Ie7tic_U9RFaFCjU57szmRArtMActYYTpXuE,78725
+googleapiclient/discovery_cache/documents/deploymentmanager.alpha.json,sha256=4MykAF5huetKOvUHydVlzr2FEDP9safoH0Y4f-ZiLVw,144156
+googleapiclient/discovery_cache/documents/deploymentmanager.v2.json,sha256=cvQ4JrLmnZT6evG4afC0Y_iOTjwp7ja35_jHddF7PoA,98786
+googleapiclient/discovery_cache/documents/deploymentmanager.v2beta.json,sha256=yNNHijtsw2y2lVsh3kvLwR6c_C47H4qFN1GWTaAwqSM,140320
+googleapiclient/discovery_cache/documents/dfareporting.v3.3.json,sha256=AaucsfQ-9Hx6evOCWHX3E6n6llRjAlvVFEqLD2bpYXE,721039
+googleapiclient/discovery_cache/documents/dfareporting.v3.4.json,sha256=0KvRG3-gvfK5CunHotnX5uTGYwl8W_af_vg0LX0q_NU,748283
+googleapiclient/discovery_cache/documents/dfareporting.v3.5.json,sha256=4p9g307jjj0_2ETEh4Ndc4Fk9i7W-Nuz6j7o_AQU05M,24941
+googleapiclient/discovery_cache/documents/dfareporting.v4.json,sha256=yTJuXnIvytP_KgGdEqTQBzxPo_s2QDS2XMDLOoBLWn0,558112
+googleapiclient/discovery_cache/documents/dialogflow.v2.json,sha256=aoDsbb7IyeHUYFWgXAnwUQpUHFk1_vllUm09G-6q3EM,821577
+googleapiclient/discovery_cache/documents/dialogflow.v2beta1.json,sha256=PvU9Tb6YkhsPYQCutZGofsMRQwzPPByWXbOtwfM7YAY,831177
+googleapiclient/discovery_cache/documents/dialogflow.v3.json,sha256=-F6c60FsLAL7UzQRCz2JPabLWIh16H1TFQrxgcmhLd0,658984
+googleapiclient/discovery_cache/documents/dialogflow.v3beta1.json,sha256=X0gMh7Kg4dBSGfJN1YKgv8dSaZly9pwQC26zktb2Rfs,662558
+googleapiclient/discovery_cache/documents/digitalassetlinks.v1.json,sha256=40C2kG2Wg0weRKFBrmtiiVDqstlsTRQyRut66SNcdJY,27271
+googleapiclient/discovery_cache/documents/discovery.v1.json,sha256=sf57o2wJ2TIDoRewwvePiTUkw-lF3ZZGnQqcGJ51KGA,18046
+googleapiclient/discovery_cache/documents/discoveryengine.v1alpha.json,sha256=RI4tuBTkWayof391FM6Q8aninzJHFrMT2vblywIjin0,421567
+googleapiclient/discovery_cache/documents/discoveryengine.v1beta.json,sha256=bNOxRqIN_4U1lzBaM4lBu9EPvpXBeUBS7ZO8OSoe3f8,382545
+googleapiclient/discovery_cache/documents/displayvideo.v1.json,sha256=MbGaOaaYMrZ04RB0S6mjq29hMnMb41mZcIAi210SFyE,737810
+googleapiclient/discovery_cache/documents/displayvideo.v2.json,sha256=Swl57NydoH_h2_LrHfMux84ySmqJlWeQzjIyijkH8tI,859369
+googleapiclient/discovery_cache/documents/displayvideo.v3.json,sha256=1CtOFQjx-i3uifspclUY-4UIsGVbB4BuRmA2VOuyI8w,870370
+googleapiclient/discovery_cache/documents/displayvideo.v4.json,sha256=PISA0gR3VQyLSSh8RD2csckszOW0nYtA3NfsArm4u3I,9032
+googleapiclient/discovery_cache/documents/dlp.v2.json,sha256=rmPEP-3XTlTdHwGsAqEQhUwna5dwbzcsX2tyjxp2Y_g,391362
+googleapiclient/discovery_cache/documents/dns.v1.json,sha256=7s9MAwO3AGn8HNd__Kes00xubi4IjKJ6P8rRloDbtS0,120384
+googleapiclient/discovery_cache/documents/dns.v1beta2.json,sha256=cV0fLf6jx4TRPxL8WaVkrO1CwmPVOe_sdkG1FZ6T7Co,120768
+googleapiclient/discovery_cache/documents/dns.v2.json,sha256=uMSPBTHSlq6gMc-OC9ap_C9WuE34lwbub8U8wOlwoTY,136579
+googleapiclient/discovery_cache/documents/docs.v1.json,sha256=pZDWGOgSIlASV8m1rWTVPuiQng36LLOiaPkhTQ9ADYU,164629
+googleapiclient/discovery_cache/documents/documentai.v1.json,sha256=Z_M4BwuhTNYcSvNkp6G7-7lhJ1cq9OubSoAR1zhr8_U,292925
+googleapiclient/discovery_cache/documents/documentai.v1beta2.json,sha256=-bDFCJeabyYStCRfUhKTGPVTThp5Rmrz_0a4U0FqRiw,194408
+googleapiclient/discovery_cache/documents/documentai.v1beta3.json,sha256=cB8bel7PNgS6XGlFB-u6jEIeuNDaUI1_Y9x7aZ20uio,318045
+googleapiclient/discovery_cache/documents/domains.v1.json,sha256=wmtFcjlYk-hdwKQVCoAGyA0T6aE7r1oIAszUhtMgXt8,98793
+googleapiclient/discovery_cache/documents/domains.v1alpha2.json,sha256=0CE1iUOZUXunBEKMJBRwz-twO5tblSp2ioJ8Avegy_o,99093
+googleapiclient/discovery_cache/documents/domains.v1beta1.json,sha256=clQO8No790a-Q2YnvPr3-gd0NDuRiBvctc8993C3u5Y,99043
+googleapiclient/discovery_cache/documents/domainsrdap.v1.json,sha256=6vP8wzfvd2SP0-d5qZX4ZAeVyaub1210zXCDTTyLK24,11381
+googleapiclient/discovery_cache/documents/doubleclickbidmanager.v1.1.json,sha256=jHYIGcAx7c3WN_5Mvr6LpEFpkB-VYgyz-rpoi41Fl-8,117347
+googleapiclient/discovery_cache/documents/doubleclickbidmanager.v1.json,sha256=c-oeCNUdKAdNJa3Bm3sqAgb6LpgiCn-s6NUUp4rsUt8,3347
+googleapiclient/discovery_cache/documents/doubleclickbidmanager.v2.json,sha256=9A09bcxlOpuDHBRg2BimRZ8QyXp8zoXB6QJFS7huL1g,23407
+googleapiclient/discovery_cache/documents/doubleclicksearch.v2.json,sha256=rlZ3JMCih1VJQmxxZry_k_XyxeAI1e6KUbXBqlqcJJM,34837
+googleapiclient/discovery_cache/documents/drive.v2.json,sha256=U4OpIAuigBAqj5u_E_4YoOWWSxhl5A81DJN8kk-2OhU,199661
+googleapiclient/discovery_cache/documents/drive.v3.json,sha256=NMz3fQma9JhazpAwpzTy8cJGNTThahiIw8XFxE8UnkA,152964
+googleapiclient/discovery_cache/documents/driveactivity.v2.json,sha256=ifQRqML1mjV_CU4XBhUHZ4CXoqlfEgDWyaZctxqRKi8,36906
+googleapiclient/discovery_cache/documents/drivelabels.v2.json,sha256=eO9RdHl7kHHMNyDlUJ8aym1Xn7lpF_J8NfUk-0Lcg-M,105403
+googleapiclient/discovery_cache/documents/drivelabels.v2beta.json,sha256=VYQGh8chndJDVmXlUeE7ghHKyBzcSx1N5YcG1IRDPT4,106827
+googleapiclient/discovery_cache/documents/essentialcontacts.v1.json,sha256=w80akC1WIblnjBMY4YynwNGQopXlVkLL3daXUbEqoGo,34440
+googleapiclient/discovery_cache/documents/eventarc.v1.json,sha256=lvHwQ6t0ET7XVlMwjB-lI5M0buRVUAGHwZwfa5bd_PQ,86835
+googleapiclient/discovery_cache/documents/eventarc.v1beta1.json,sha256=uVgBZAUFYNrou6oaAzK15TQ1ytVk5az1RIhykC9GYX4,46836
+googleapiclient/discovery_cache/documents/factchecktools.v1alpha1.json,sha256=GGfyJqsRtxZXKY6z8YGZckVPwvoGR3f3Cf7RM1s9FGg,19314
+googleapiclient/discovery_cache/documents/fcm.v1.json,sha256=E8HKU0F44DCvEtcBMlg9WEQ7Aock9HqjkNxyq7vc0JU,30270
+googleapiclient/discovery_cache/documents/fcmdata.v1beta1.json,sha256=B_S9Q8vRcanqNIxAN8Q0qDTGatrx_4l0mTi4i-crZ98,13172
+googleapiclient/discovery_cache/documents/file.v1.json,sha256=a1wDV21Jv3dMUN-zZA6QL567pXsMHwc4Cm1CN7ssk_Y,73231
+googleapiclient/discovery_cache/documents/file.v1beta1.json,sha256=qQze10wJocvwsfWd-8k0RhIV0FrFdFIbgvuQi7oLeD8,84195
+googleapiclient/discovery_cache/documents/firebase.v1beta1.json,sha256=sFeB0b1IkyLY1AT9iFH0_VQi4ZFgSv4tNLYf47cKJ5c,112124
+googleapiclient/discovery_cache/documents/firebaseappcheck.v1.json,sha256=t9G4gAFY73-eqL8MI4-7qDfR0zk87jINuy1twH5K_kU,79978
+googleapiclient/discovery_cache/documents/firebaseappcheck.v1beta.json,sha256=FHpBBFwCtSBHYk-hFKaDAkrlwL0K2MxcUAKSCE6sVpg,115355
+googleapiclient/discovery_cache/documents/firebaseappdistribution.v1.json,sha256=6XV2XReRA06XEk3Q7Jbwt1Vppfgc4gIDEhNq5h4HANI,65361
+googleapiclient/discovery_cache/documents/firebaseappdistribution.v1alpha.json,sha256=D1Kym-m-fu6-0i_GugbUVuMaePJ-oaGb6tCDTBoqK28,36058
+googleapiclient/discovery_cache/documents/firebasedatabase.v1beta.json,sha256=xzxeFpHp2rVL_tOWJMabKcmBbcp4WKVWF87u_fzefbk,15636
+googleapiclient/discovery_cache/documents/firebasedynamiclinks.v1.json,sha256=2iv-vK0Zumso-S4zWwG-NvlC0SgC3pLtL7Xsv4X1sPM,32930
+googleapiclient/discovery_cache/documents/firebasehosting.v1.json,sha256=tzbyxBuLqiArwsQcS4_2PU7hYj7hoOn0aezJrh4a_LQ,26740
+googleapiclient/discovery_cache/documents/firebasehosting.v1beta1.json,sha256=GKk-FEQLWJJ9sGZzc6gXkiwpDpJyQa3BYGKcz2eNjYk,133655
+googleapiclient/discovery_cache/documents/firebaseml.v1.json,sha256=fdZKL7H24N80MaibDj3WdeEB3aJS-N5kyZsuwkCL-io,10354
+googleapiclient/discovery_cache/documents/firebaseml.v1beta2.json,sha256=9wQr8IJqTLI7EdlLsA-5SqJZpF7uoEeDb9WNGOn9ryA,16713
+googleapiclient/discovery_cache/documents/firebaserules.v1.json,sha256=Em0mxXjl7frFeiXh9s1lfwavHN-k_lGmmkMZdrmKU1Y,34167
+googleapiclient/discovery_cache/documents/firebasestorage.v1beta.json,sha256=7YSumWwvH3iNGUYymggyEDs1FX0h1cun677wKynNES8,12239
+googleapiclient/discovery_cache/documents/firestore.v1.json,sha256=6vzdDbppbqi9bSuafh7zwI_epBPdIYfNggeEAYBt76g,162922
+googleapiclient/discovery_cache/documents/firestore.v1beta1.json,sha256=mFFhYRBWnRcMscvCu7QzMJ7RgpJHYUK_m77QyoQXIws,109717
+googleapiclient/discovery_cache/documents/firestore.v1beta2.json,sha256=XOSpy745Hr8ays6aNNRRn6ENqsrlcdtOr84xwR4GAg4,39889
+googleapiclient/discovery_cache/documents/fitness.v1.json,sha256=zZNrBGaHe_u57wA1gzl5QRDEl8JGR-zy2uyDzRaDU54,62402
+googleapiclient/discovery_cache/documents/forms.v1.json,sha256=Shmt2613ZfW42NVmbQM_8yTcgXLmGOcdjl45tDfoe-w,48259
+googleapiclient/discovery_cache/documents/games.v1.json,sha256=GT_795oG9Jomx3bEfdsrMcTlOAALY527Kar8UkPXxq4,95733
+googleapiclient/discovery_cache/documents/gamesConfiguration.v1configuration.json,sha256=yx4-n9cbq9EsEiemMzsxFNBy-0yTHfzv9JYkbm2yzTI,19279
+googleapiclient/discovery_cache/documents/gamesManagement.v1management.json,sha256=fkJKvlul-Fdj2pAEFYklyvad66sf3jKG3-DX8elrvk8,23156
+googleapiclient/discovery_cache/documents/gameservices.v1.json,sha256=E5MFFlyECvsMxSIdo45glqQhAagCDHzeRSo13RPcJ-o,54733
+googleapiclient/discovery_cache/documents/gameservices.v1beta.json,sha256=amIruWItU7wwP_wTWMXY_g2eoY-2FW24EWyPxwQtMvs,54813
+googleapiclient/discovery_cache/documents/genomics.v1.json,sha256=r75lm6eDRH0cxivOOnIQnsm-KkrMY0C8Kp1H6WirqHY,44128
+googleapiclient/discovery_cache/documents/genomics.v1alpha2.json,sha256=OBijBJvThGSucv3G0IILtC8RNGFmdxYrZeLTDFMncsM,66239
+googleapiclient/discovery_cache/documents/genomics.v2alpha1.json,sha256=80yCmc2cqlgSJfPYAxqPMCKFxDWRksFMZzsEkgcsQmI,63816
+googleapiclient/discovery_cache/documents/gkebackup.v1.json,sha256=CO9GTXC6auOekEmnB1D0eNodV7KDdIbtPcjnP9Eonpw,135197
+googleapiclient/discovery_cache/documents/gkehub.v1.json,sha256=_8bNRSAh-ojH3MW8x_j-waHO876DZuISqmK7Of5zaTU,197215
+googleapiclient/discovery_cache/documents/gkehub.v1alpha.json,sha256=Mi82E2HrkKQ5Lfs8PwSPJ4XRRy6cH7zGSIcbtdrIb-s,226179
+googleapiclient/discovery_cache/documents/gkehub.v1alpha2.json,sha256=mQghcUUi5Tpst9-ftIjBSZGdFiCVSjid-HuW4jzurD0,65373
+googleapiclient/discovery_cache/documents/gkehub.v1beta.json,sha256=8NPfo7nJwuaOCfQdGTfivCIOXSrkDLzPsGk6xAe_XL8,209630
+googleapiclient/discovery_cache/documents/gkehub.v1beta1.json,sha256=raVd74JfTE0iZYhBzxYIpvBDYTy5DoOirlFNBTS-Meg,70370
+googleapiclient/discovery_cache/documents/gkehub.v2alpha.json,sha256=U46g4QX-g6dLat7TRq0pKEgwZtGkJOEyeQWtSJrUSBk,13237
+googleapiclient/discovery_cache/documents/gkeonprem.v1.json,sha256=ZHVfoPAm2u8hwXMQUbhAMNaK0C5mZXq5LJGvo75SBkQ,245918
+googleapiclient/discovery_cache/documents/gmail.v1.json,sha256=VOsWe0AjPb8366IvdkYysflSPR-o0kCBPBXb7y25KN8,130285
+googleapiclient/discovery_cache/documents/gmailpostmastertools.v1.json,sha256=tVGPPPxNNzDQyApbk6tHhQf3R8RxJbecmObPgUvu1Ck,19146
+googleapiclient/discovery_cache/documents/gmailpostmastertools.v1beta1.json,sha256=fCaH7K1C5KKj_QLi6vYspkzm899AOdIdwyrd_QhIBc8,19533
+googleapiclient/discovery_cache/documents/groupsmigration.v1.json,sha256=laUZ7RgVGTz2BGcVsBHM5czSlEV6yCEDh_TVvQXLVYg,3947
+googleapiclient/discovery_cache/documents/groupssettings.v1.json,sha256=nIlEpwiGvuICnB4MFl-FeDzDuC2qp-7aDfdh5PnKJ9Q,24702
+googleapiclient/discovery_cache/documents/healthcare.v1.json,sha256=dP0oujNE3dLA0B-1qD9Ev5CNM-UfDXfIzQAmSWjt9oE,365134
+googleapiclient/discovery_cache/documents/healthcare.v1beta1.json,sha256=3Cnc5Ly9YoG7ojMxYA9xDIFmt19dzmx0AhU6jrqvelQ,477757
+googleapiclient/discovery_cache/documents/homegraph.v1.json,sha256=QFkBfXnLSzpBAAuA-xHP8FUlvQWa4iIwT27RAtA6SBI,19439
+googleapiclient/discovery_cache/documents/iam.v1.json,sha256=g50YhVScNvuJjoregCdUbXfUGGxEp420xdfxybNk4DU,201234
+googleapiclient/discovery_cache/documents/iam.v2.json,sha256=LjNEOjMUyBQLIGyeRDvKuKxnumNJuAqheiD0Pz5wmYI,29073
+googleapiclient/discovery_cache/documents/iam.v2beta.json,sha256=pRaQqE3Xtmcj0lQwTHsWYqydMQRaiw5D9oCsE3hvZXQ,29197
+googleapiclient/discovery_cache/documents/iamcredentials.v1.json,sha256=L9l7A5OVJeQMFFxTXI7DF470q_Ez5cxNIv2v3Io1zEE,14347
+googleapiclient/discovery_cache/documents/iap.v1.json,sha256=VPLdTMQzc6JgHbyBYI8IPUR2mnMj8yYe0a9FQXgIzkM,54335
+googleapiclient/discovery_cache/documents/iap.v1beta1.json,sha256=7tFKOfwG-ExNjlrG7cB10g4irBUjy0bPu3eAWWCuB4E,20190
+googleapiclient/discovery_cache/documents/ideahub.v1alpha.json,sha256=PfWK6J_Vqz1f_-mYWGBlCxLWSc8iwHrn9QRu0fl3GBY,21042
+googleapiclient/discovery_cache/documents/ideahub.v1beta.json,sha256=9N4Jk-jHWcWC-Cd5Hb_ARxh7nB6h2HVqrDNPRtei-GY,18785
+googleapiclient/discovery_cache/documents/identitytoolkit.v1.json,sha256=ptjJrFIsXKCr8VtNCxEjwN0pt_7M2OAr1iWvTB86EKs,142789
+googleapiclient/discovery_cache/documents/identitytoolkit.v2.json,sha256=7iJfW8-PBWs-dEAweu-jyRszJ-Lopi58L7uUTd2Y4jo,131384
+googleapiclient/discovery_cache/documents/identitytoolkit.v3.json,sha256=1RFnGfmY0X0BPw4SowmIbeuhS8tGGm5rFXrEtQpV9Jk,54662
+googleapiclient/discovery_cache/documents/ids.v1.json,sha256=dxkwjVIgqM-fAJ907JdQT-lwDl-QCqRxllpf0xdwCCE,43597
+googleapiclient/discovery_cache/documents/index.json,sha256=aToo8a5LH-shdyLDo4-_QRwB6s1vFYEsF2w-gmO3-eg,213200
+googleapiclient/discovery_cache/documents/indexing.v3.json,sha256=H1Jb0J9da-NpaY1FIVZGK5KVuf7VuIGWWEeWzNPsu7s,5776
+googleapiclient/discovery_cache/documents/integrations.v1.json,sha256=dPPsut8SKDMMJNc36aiUxesb9aeachEdVJ91IOTeBQg,546389
+googleapiclient/discovery_cache/documents/integrations.v1alpha.json,sha256=FjjK5CVkadQteejXrzqe_fNTGhls8u_UnOtmgOsjkZE,373468
+googleapiclient/discovery_cache/documents/jobs.v2.json,sha256=L-aDKIJ86eq-tEmOrHZfKD2MmQkCP_uTwCCTQHSJcoQ,227088
+googleapiclient/discovery_cache/documents/jobs.v3.json,sha256=Cw1HQmHR-DnBqOq5bAdUs41VsDR0fDSJ0MIsRqdV5JE,126810
+googleapiclient/discovery_cache/documents/jobs.v3p1beta1.json,sha256=Xq7A0WxNX2W5E9WS-_lN-YqphXyFyrpfUr7GimoRQMI,140799
+googleapiclient/discovery_cache/documents/jobs.v4.json,sha256=s0xJhj2gpUbXvpRnMN213rqaspn4wi1XS3-_ip6LwZA,140315
+googleapiclient/discovery_cache/documents/keep.v1.json,sha256=7VJNwVRhUx0KQxvcORGExGSl9T_D6IlUMqktg5lUShg,16988
+googleapiclient/discovery_cache/documents/kgsearch.v1.json,sha256=mBmJiqN0kB_YF1oAkVU-iT2PlW0lsjICdkmMOuzNETA,4886
+googleapiclient/discovery_cache/documents/kmsinventory.v1.json,sha256=ukUbjBaQJbDoNE3OWzFmwz6h88QKxz2vPdWC59pjmy8,33417
+googleapiclient/discovery_cache/documents/language.v1.json,sha256=Vn5w73p22IerVgZX1yTYOdg7ca5wQrPyx3eM1aElpDQ,37242
+googleapiclient/discovery_cache/documents/language.v1beta1.json,sha256=-Nwsr0Ls6nKWMZSQ3ZzkW1Zdvu2gEqb6yt4r65Z1xOU,38558
+googleapiclient/discovery_cache/documents/language.v1beta2.json,sha256=c0fYY0QKCyLdWm1PeZJTlfyuFBUf9MvWF0rZXfg25xQ,38063
+googleapiclient/discovery_cache/documents/language.v2.json,sha256=vbzKveobFSB-5XUknnZ0j8hb_QqDL1STByMFUn3Fc7w,23236
+googleapiclient/discovery_cache/documents/libraryagent.v1.json,sha256=3Zwyid_dw02jaO1UDRkrigILAp73oLFMleSBhaQ_Hf4,9898
+googleapiclient/discovery_cache/documents/licensing.v1.json,sha256=6sm3RrI4YO8OWIpfChWBsH8CtDmpp30BdLSOutY1gcQ,15821
+googleapiclient/discovery_cache/documents/lifesciences.v2beta.json,sha256=F8GA6wiKvekBWsQRl_F2fK9oU79gImia9zb_3tW48bo,52344
+googleapiclient/discovery_cache/documents/localservices.v1.json,sha256=ioz3xDijIxk41fKr-K9jWP5Oy9bRzSbU5N3r009aY1M,18599
+googleapiclient/discovery_cache/documents/logging.v2.json,sha256=i-fPcuLzCgcBFoLf8Bsn_VU1hK1lSYtijGlW3N21P8w,425469
+googleapiclient/discovery_cache/documents/looker.v1.json,sha256=yvqJ_QHR1E7XHdJfblkrPggokCT4t0R2530xRG0oc7w,59706
+googleapiclient/discovery_cache/documents/managedidentities.v1.json,sha256=Ki_dgmYOjecPamwnj9DPwaKEp5DGjszQkRPnUZ0ogY0,118265
+googleapiclient/discovery_cache/documents/managedidentities.v1alpha1.json,sha256=gfQ7Zoovvtt8kELkoKvnoadCKoehE1SPfutDaXHrsY0,118405
+googleapiclient/discovery_cache/documents/managedidentities.v1beta1.json,sha256=b0GvZCmfJ6hheQDOFn7oz4hK1X1gGg7L9S1ikvDxviU,119358
+googleapiclient/discovery_cache/documents/manufacturers.v1.json,sha256=VApm4cQkRYq4vXrFqrKUg600zooM4VrOc1YwIFp0AU4,39749
+googleapiclient/discovery_cache/documents/memcache.v1.json,sha256=Fn05iiQ5oBjQOV3ZqfYbfISZu-yLZTlCS0jy3zbUkPI,63651
+googleapiclient/discovery_cache/documents/memcache.v1beta2.json,sha256=7thetsJo4nxB2jyDYifzjHQddkehW9Cc19tCJTiSoyg,65577
+googleapiclient/discovery_cache/documents/metastore.v1.json,sha256=dMADdw7eYROD25AU-2sZWlo8k5jyplxG85jI3xSnNlU,110848
+googleapiclient/discovery_cache/documents/metastore.v1alpha.json,sha256=XmyFK6TAPIjheDVpS6kT2TOkBC8SWz22DlXrj6n6oaA,123411
+googleapiclient/discovery_cache/documents/metastore.v1beta.json,sha256=pmJSZKwe6PcX8QoAeJauAuBoJf7k56Sz83T1XUwM-f4,123317
+googleapiclient/discovery_cache/documents/migrationcenter.v1.json,sha256=CWTQ5-iJ7nJRwHoe1Npsbh0URtvSuI4_JTLn7WEfWV8,169804
+googleapiclient/discovery_cache/documents/migrationcenter.v1alpha1.json,sha256=TmjEBE8ushlr2l4o91SbA2a6OkC0REjL6U0dVD96RsM,180765
+googleapiclient/discovery_cache/documents/ml.v1.json,sha256=ph-Cl9ePWGR0Dl8tbmi6jBQirfLiThgn4nHvy5z8bRQ,175515
+googleapiclient/discovery_cache/documents/monitoring.v1.json,sha256=D2R7TXiDv9qRyJpUgmDqjxmaCo7MXQUZj24ts7zovXc,104208
+googleapiclient/discovery_cache/documents/monitoring.v3.json,sha256=t9SNTf8SeSANBRSW2bnninDuNmP8xfmG2qwUy9G1_ys,330218
+googleapiclient/discovery_cache/documents/mybusinessaccountmanagement.v1.json,sha256=euXJnbbBIHKOTawmND3iqVApv0QEdfJoQZIga7qX5Tg,31822
+googleapiclient/discovery_cache/documents/mybusinessbusinesscalls.v1.json,sha256=w_OnSvJZU62j-5J8o3Mt3CmA2Rk7_GKu5DuhsIV574Y,16209
+googleapiclient/discovery_cache/documents/mybusinessbusinessinformation.v1.json,sha256=50ekz5XzpMmuH283OT5z_Vra1nK49wizouIg7d-sYTE,65869
+googleapiclient/discovery_cache/documents/mybusinesslodging.v1.json,sha256=CEJCNObf9-QVl1yH0XqNdNrUDnp-sa8Klr1CbKOq2I8,222585
+googleapiclient/discovery_cache/documents/mybusinessnotifications.v1.json,sha256=Fh8rlOiXP2x1LLRlGyPQiJfVA3bmKl0fGn422XHz-RM,8029
+googleapiclient/discovery_cache/documents/mybusinessplaceactions.v1.json,sha256=X27RCumhMCSopfRN4Q7WhWzGd8wgJWDZP5_RNMA8k20,14578
+googleapiclient/discovery_cache/documents/mybusinessqanda.v1.json,sha256=eJuBNK4CwQ3S-zmxbsANLBrRCUeRtEWNhJQdHuaL0Dg,14263
+googleapiclient/discovery_cache/documents/mybusinessverifications.v1.json,sha256=KjYIvjXAPH7mqZz2ddn8heWaXQBSUbmvUFB_t8HVkOU,25381
+googleapiclient/discovery_cache/documents/networkconnectivity.v1.json,sha256=ysgSIhiJmLxG8fDa4DWdMnj25g7JoGaY4jiRY_YbHbc,185121
+googleapiclient/discovery_cache/documents/networkconnectivity.v1alpha1.json,sha256=UauBN4w4BDq1Q44ke22Zx6WEb8tysOEo_Bt0pYDtd2M,76052
+googleapiclient/discovery_cache/documents/networkmanagement.v1.json,sha256=0lrFK1DfWJ_LdvKLoCChlROyZlxNgFXF4JNydFu85Y0,110616
+googleapiclient/discovery_cache/documents/networkmanagement.v1beta1.json,sha256=By54kcijQRW3a0OK6oEmjCemX7zURjKfd0F136YVRFM,111073
+googleapiclient/discovery_cache/documents/networksecurity.v1.json,sha256=wjDtWnUxJMZ-QSNWCv4vdf9UkxwVtrrDhoKnC3ecSgU,145307
+googleapiclient/discovery_cache/documents/networksecurity.v1beta1.json,sha256=8N8KRQoonC4KxG-YJyK8xkUc2zLoua7Znu9FgYFje54,185770
+googleapiclient/discovery_cache/documents/networkservices.v1.json,sha256=MrGgd2qJKbdgyZuXCsBvciVsbqUtDUxanZ0yIQJmS_o,162482
+googleapiclient/discovery_cache/documents/networkservices.v1beta1.json,sha256=Th06X2291NyEOuH-bcmeelGh-uEWXGo9R5UpQ5YiqmM,190906
+googleapiclient/discovery_cache/documents/notebooks.v1.json,sha256=4Tf8Jbqzmm_cbkHwc62FVpJSnDs4bmCgCulBta-8OkE,158227
+googleapiclient/discovery_cache/documents/notebooks.v2.json,sha256=oJa8pzINaC4io5TDjIeU4gVsvOXY7LvjKPAxwAzJyoA,70455
+googleapiclient/discovery_cache/documents/oauth2.v2.json,sha256=qbqQUKMu6g1IrIHIAfYwTAZZgIDf8j69Lr6i-rzbzLs,6601
+googleapiclient/discovery_cache/documents/ondemandscanning.v1.json,sha256=lyp77tjZFlLL6CvfK0emyDxqHjaYRIcvtpsz7dhT5io,88252
+googleapiclient/discovery_cache/documents/ondemandscanning.v1beta1.json,sha256=yuxSI6PT0XA2dHokzIuSrUdwvwnalizHg6AAcLzdPcg,88103
+googleapiclient/discovery_cache/documents/orgpolicy.v2.json,sha256=rk52GVKpyB_OfP2K6ZNtpzML7mqmIKaxuDU21340OXs,50394
+googleapiclient/discovery_cache/documents/osconfig.v1.json,sha256=cJVl72fUt_8LvLf0r3uY6ncgaBZQ7yIqU8ze58QPYys,132625
+googleapiclient/discovery_cache/documents/osconfig.v1alpha.json,sha256=KNjzve7bSVaT92UkERGI9frgvb6c4xZlPjCoKPBKSJM,100049
+googleapiclient/discovery_cache/documents/osconfig.v1beta.json,sha256=mDUqzpEDeeRQR_AJVKBP4JNsE116EtL8jEmgijbBue8,82652
+googleapiclient/discovery_cache/documents/oslogin.v1.json,sha256=zENWbYA-uIEEOnQRBhtRNiDDbQhRuz7R3-z3p2uUdqo,13069
+googleapiclient/discovery_cache/documents/oslogin.v1alpha.json,sha256=rHY8zK1sfJO8glD1SQ9zSHRujTUEQgfMd3_C1jr7juI,18548
+googleapiclient/discovery_cache/documents/oslogin.v1beta.json,sha256=F7wqw_s7givw6GKpXL7GrzFoBALVjQ6V7UTi1rw_w50,17672
+googleapiclient/discovery_cache/documents/pagespeedonline.v5.json,sha256=4ZDTK_d0y7bXDLDVK8vR0GynJZaJNKnkCDHKptF8jqI,27329
+googleapiclient/discovery_cache/documents/paymentsresellersubscription.v1.json,sha256=K1OH_rOCURVBOOJLTKelyFgRdhdhl3LSFiW2igQCMa0,56810
+googleapiclient/discovery_cache/documents/people.v1.json,sha256=Jv9K9ZK2z1dqP7PGR3I3JxXSAgYESx76GApWbULTx3E,111474
+googleapiclient/discovery_cache/documents/places.v1.json,sha256=A5mB0vPnFgmb80yVLa5J1rFk8kNSjiFyVTYZJ2VcHZM,54422
+googleapiclient/discovery_cache/documents/playablelocations.v3.json,sha256=wbkjdTm2IPCudHBKwwIOaAWwsRxfcinsOiKPR_C5G4M,25906
+googleapiclient/discovery_cache/documents/playcustomapp.v1.json,sha256=OAnUpDWeAXpktNpqlRu06cbkbSR1iAFY-zqQW6B56qo,5201
+googleapiclient/discovery_cache/documents/playdeveloperreporting.v1alpha1.json,sha256=R-ZCx0LQdXNa9pgj02VjTM-ihcob8VoJNdtGT7rY2AU,136820
+googleapiclient/discovery_cache/documents/playdeveloperreporting.v1beta1.json,sha256=LSPZsYORloorgV_fArKPo-h3QR9oCSxBZuGjjTugXnY,136626
+googleapiclient/discovery_cache/documents/playgrouping.v1alpha1.json,sha256=niUwopUBsZRvDI4EBLdk1r6cGXqUinhVWu0w2rcilWk,6307
+googleapiclient/discovery_cache/documents/playintegrity.v1.json,sha256=AmWWZjskDq6OYdQJdQW6j8soPf3KBJqobhnGpTzEJBE,14865
+googleapiclient/discovery_cache/documents/policyanalyzer.v1.json,sha256=XqYPicqh8c9jKyVFe1I3wRdzS1M2DZIEbnEeNBKPElc,7177
+googleapiclient/discovery_cache/documents/policyanalyzer.v1beta1.json,sha256=7FLqFzqd5UecnYRtoErQ9fVtpRpszEVteIQFsm-lamo,6796
+googleapiclient/discovery_cache/documents/policysimulator.v1.json,sha256=yZ4IQvIx0suJMObdRoyX_M9eWeTv9PiaRLfjEnr7LNU,88010
+googleapiclient/discovery_cache/documents/policysimulator.v1alpha.json,sha256=i4fV2JggzKqVPhgdJtrABsHYQGh6C1xG5Z5cbgj8qcA,110666
+googleapiclient/discovery_cache/documents/policysimulator.v1beta.json,sha256=Ku3d0_OTLWHpfnplxdw8-dYaiju8yfs8uteTZEuQzTs,110542
+googleapiclient/discovery_cache/documents/policysimulator.v1beta1.json,sha256=Q1ufGYQSG04J4ufoZ0JH4hgwdeN1QeW1dhREOv-b0Pw,54107
+googleapiclient/discovery_cache/documents/policytroubleshooter.v1.json,sha256=VZfJ1xhr0ZJ75-fvf33JO03no0ThSGEpD0Tiqh9FJP8,31718
+googleapiclient/discovery_cache/documents/policytroubleshooter.v1beta.json,sha256=69tWa83g_IjwMIP_pgsOVqkE5EC6VEGT3lbiOmlwjA4,30156
+googleapiclient/discovery_cache/documents/poly.v1.json,sha256=MI1zdlmWLLalpCwlx22n33nkszug6pksZb9_1x96K90,27091
+googleapiclient/discovery_cache/documents/privateca.v1.json,sha256=TNtpBtNO_Y5Lgtn6lGh5IcKYdjpVCd_i0woBYbUpUB0,147030
+googleapiclient/discovery_cache/documents/privateca.v1beta1.json,sha256=WSl9fGrIs13DNOP-E6fCN4IAuYu7YDdLT2Yz7am9T84,44001
+googleapiclient/discovery_cache/documents/prod_tt_sasportal.v1alpha1.json,sha256=_YdieuPjmpHSYehx3Ac2s-dKn-WBlbh9sLrrnPWjAic,99871
+googleapiclient/discovery_cache/documents/publicca.v1.json,sha256=n2dtTPyZNLDh2iX-LkmIDoOLg2YRWwPLD1_4zU5efiA,4849
+googleapiclient/discovery_cache/documents/publicca.v1alpha1.json,sha256=5MD2Un7y5zelJd6EhBMEEAu3nnDcjFZ77jFksekz98w,4873
+googleapiclient/discovery_cache/documents/publicca.v1beta1.json,sha256=_sVWh2FhudyggQmvRdHLKDraqlNN4aPRyBOaAwhqWQQ,4869
+googleapiclient/discovery_cache/documents/pubsub.v1.json,sha256=-4WLxRSRn2whbJJobBDyWOisVSNvFzX_PbKH9VCs0zs,115912
+googleapiclient/discovery_cache/documents/pubsub.v1beta1a.json,sha256=t35d1PQUL0afAEFIJMlCQvF-D6Bc0NxZ68SB1Z8Sqq0,24352
+googleapiclient/discovery_cache/documents/pubsub.v1beta2.json,sha256=sLJst-1RFgidOmB5YQJmFV_Cc2uwuCTQIKUn77EeMWw,49462
+googleapiclient/discovery_cache/documents/pubsublite.v1.json,sha256=cGJj_klL3_-fP-MewWOX748xzYTHc5NPZsGlcWjPsZU,53050
+googleapiclient/discovery_cache/documents/rapidmigrationassessment.v1.json,sha256=ThgiBspqeOFM-yeBAYOGUTVCC5pOFF0Oh5v9Qwm9QY8,32479
+googleapiclient/discovery_cache/documents/readerrevenuesubscriptionlinking.v1.json,sha256=vJc33x9vJcyLycvuMvCXqd3uWf7IkctAmOCSz7SWItw,8887
+googleapiclient/discovery_cache/documents/realtimebidding.v1.json,sha256=H7ZbaGQGownIhDAcmgbhBLq_EVAO2zsD7HqcQG4DsQ4,117222
+googleapiclient/discovery_cache/documents/realtimebidding.v1alpha.json,sha256=0aZYwk8L6hmk_xVK_rr4BwT4FPEn56oiZWKVqin92rs,18365
+googleapiclient/discovery_cache/documents/recaptchaenterprise.v1.json,sha256=VjJU0fUFD8VH323pBpy5f6N26L35ZWU5JEu2STq-Wow,84140
+googleapiclient/discovery_cache/documents/recommendationengine.v1beta1.json,sha256=9XIZlsyAmXs8_UVT3n79qL34LBOW7qvgfXBkKtZftEg,87898
+googleapiclient/discovery_cache/documents/recommender.v1.json,sha256=I8O-ZxI0DXdBfBjSsoAgtM50Y7GN3HABtbQCtZSmEVs,98451
+googleapiclient/discovery_cache/documents/recommender.v1beta1.json,sha256=aWy3ymA4C3LY44i35qbeGz1aiz1M6uiBJeoyQhLsYOo,110032
+googleapiclient/discovery_cache/documents/redis.v1.json,sha256=auhgXOKj3Pu-FqWTh8wePSVKzsW9Jzf35jZ3um5qzVc,67548
+googleapiclient/discovery_cache/documents/redis.v1beta1.json,sha256=IDa6zNz7y0cW36PAXsREZlwIR8BAjeuPW5eqjh0BuG0,68354
+googleapiclient/discovery_cache/documents/remotebuildexecution.v1.json,sha256=7GFLt5IiI_-OYe68YsVR99EYWAZbRDdQ7MBSu2yU8n4,117609
+googleapiclient/discovery_cache/documents/remotebuildexecution.v1alpha.json,sha256=hSDoD01FhwXYIVbrOwSe7cLI19DoN1AwZm4ylBQzTgU,124864
+googleapiclient/discovery_cache/documents/remotebuildexecution.v2.json,sha256=EII61EfJ8sqYPcx-Ilk5j-XosxGTIfrN48mLKLkGfnQ,148991
+googleapiclient/discovery_cache/documents/reseller.v1.json,sha256=UWM1f_2X_w_xzX59LBLzFJJgUBdLqf4H0b2ADWmb3Ww,48941
+googleapiclient/discovery_cache/documents/resourcesettings.v1.json,sha256=VOberQ18pk9AHabWtFTFQA8MIfrWgHIfF5YT0zNAUSU,22202
+googleapiclient/discovery_cache/documents/retail.v2.json,sha256=Q3KpHbLSkKfMVlBObpf3toc05Y4hHhz_Jxf6eQ3b4DU,344022
+googleapiclient/discovery_cache/documents/retail.v2alpha.json,sha256=qwRxMzNIiQaQbCVtjj4DvLlFJTVxnd0cvyxRHO7hG6k,375255
+googleapiclient/discovery_cache/documents/retail.v2beta.json,sha256=zJI_HikeswKe91bhmRsTI70CRBaaXwMDOmIPJY0Oj-w,357002
+googleapiclient/discovery_cache/documents/run.v1.json,sha256=9KEtZOg2HHx7CzAcJ1Zmqttk7b0HFAnDTJQVCokMUnc,183716
+googleapiclient/discovery_cache/documents/run.v1alpha1.json,sha256=Z0H_iXudGt7JDmcS4XZx09xCSBXPBv9MCktQqrAiayA,69965
+googleapiclient/discovery_cache/documents/run.v1beta1.json,sha256=huF9CDeN1HdW5tai_neIoAPtXOqg1t_KxstLag14WiE,40411
+googleapiclient/discovery_cache/documents/run.v2.json,sha256=cMi1ZsG0_p6XBm8wiLoGDfpdjbO8e4R_ILx5l6I3Pm4,150586
+googleapiclient/discovery_cache/documents/runtimeconfig.v1.json,sha256=oEXXM-st0IhI_Z8vBbSMdSPsd3QS2P0r5hyckxB10b4,10287
+googleapiclient/discovery_cache/documents/runtimeconfig.v1beta1.json,sha256=qIOkD1nAYMB793kSkZiWbc4iHIrWwqFLbJu8pq7jmT4,55138
+googleapiclient/discovery_cache/documents/safebrowsing.v4.json,sha256=L0TztOuPU-OR4iCby9mJmaXhV3ZOxeKUmtjcSZP48m0,38346
+googleapiclient/discovery_cache/documents/safebrowsing.v5.json,sha256=3CtygyxG_lfFoKtc9Ow5Zzfm17D2yh5ytAjoBgk8eXE,7187
+googleapiclient/discovery_cache/documents/sasportal.v1alpha1.json,sha256=5hWvK9XxdNW3XvLhOU5bMipIoBKjG1dEqCw05-3j0nQ,99126
+googleapiclient/discovery_cache/documents/script.v1.json,sha256=f-p5H9wTplAuB5Z4FPNsKDQpCLnLe0c5gRRQNfNIx0c,52104
+googleapiclient/discovery_cache/documents/searchads360.v0.json,sha256=mzSW81As09dUAiKxZOcQeGVX1s0jgMPOEb8hS7Nm6i0,296450
+googleapiclient/discovery_cache/documents/searchconsole.v1.json,sha256=kvD_GtAfbWcxgD7LPscE9OixglduhTlK8jganrEGZaY,40068
+googleapiclient/discovery_cache/documents/secretmanager.v1.json,sha256=WvTXdWmxmvKA5Z3nHfpss_X3h_Ex4l_r55pFOqcvccs,69858
+googleapiclient/discovery_cache/documents/secretmanager.v1beta1.json,sha256=tdfpnSJkZkD7hCD_6OuJ0qlBUqRmEOZI1vCx1qrzRGc,43202
+googleapiclient/discovery_cache/documents/securitycenter.v1.json,sha256=sJO7j6ullRZLJdu-8TDvuwKBk5gP9m2P7ngYeOuDLx4,407526
+googleapiclient/discovery_cache/documents/securitycenter.v1beta1.json,sha256=_eNw4NoQ5sLx90y2_YBVsZe0J2dLml07pHHn1tkJjzY,172382
+googleapiclient/discovery_cache/documents/securitycenter.v1beta2.json,sha256=nyE3XTUOTr2FG4IAw8TW_Uml6GloGlKyboK5Wi5RlVc,191716
+googleapiclient/discovery_cache/documents/serviceconsumermanagement.v1.json,sha256=8hbXAbo34FQ_bnnISNMvw--qxJVJLl-q_v0-tPlGgeE,156983
+googleapiclient/discovery_cache/documents/serviceconsumermanagement.v1beta1.json,sha256=Q-aVYB0HFCIKqnQRXBrJU-8LoNQ92gkufMxYypjsnM0,152639
+googleapiclient/discovery_cache/documents/servicecontrol.v1.json,sha256=FxyMrIDi5hZXqrNVJA99uei7zRBwLpjG7SgkyEG414o,95274
+googleapiclient/discovery_cache/documents/servicecontrol.v2.json,sha256=TKQ9MmzJdxEUlEjD9VxRkpBNmvxNiU-GXGdB6wl9XoU,51508
+googleapiclient/discovery_cache/documents/servicedirectory.v1.json,sha256=T2hrXhNAkntzL-szfOCyt_8965m8Ni6EEIgTaOCBTbg,54213
+googleapiclient/discovery_cache/documents/servicedirectory.v1beta1.json,sha256=TrMSe024v933Zu5TmJrDMbRwMJmBWc36RWEB3QqOneI,58637
+googleapiclient/discovery_cache/documents/servicemanagement.v1.json,sha256=KEOow2KiUeRljNf_ieRXJVvNxFo2P5D1F7eWkbi_vnw,177773
+googleapiclient/discovery_cache/documents/servicenetworking.v1.json,sha256=zhohLwgD6as_IjoE5_RyPWwYmsZjuGLkLIoa6yH0Yp4,191069
+googleapiclient/discovery_cache/documents/servicenetworking.v1beta.json,sha256=WAGSoVdiXV-wbd1KcLJr-Hc0IrH1eDW0kg7VXtBVwkM,144352
+googleapiclient/discovery_cache/documents/serviceusage.v1.json,sha256=jwdco_If2Bel6tO0Z7qSfaWl_emF-7bWeuZsHfuM6eI,154088
+googleapiclient/discovery_cache/documents/serviceusage.v1beta1.json,sha256=geGgabt4lg62py0-xUgKUIC_ZYYEQnMNnpnHhuXshkI,189157
+googleapiclient/discovery_cache/documents/sheets.v4.json,sha256=RqxJO_kFitwncyzrT9iQRUlQR6IBPkLvJcg3zqwAk64,284998
+googleapiclient/discovery_cache/documents/siteVerification.v1.json,sha256=xvq_xsL575_6zsUckW9eo-jWWdxW6Ng13g8ohB2uGLA,8419
+googleapiclient/discovery_cache/documents/slides.v1.json,sha256=3Aeq7NTwDm1w2IBikBlpNHiyviw384mpY7BF90_2ygU,184016
+googleapiclient/discovery_cache/documents/smartdevicemanagement.v1.json,sha256=p3yK8s_QbWAraoWNW32gjWf1drh8WASBYbPhJmx6reE,12914
+googleapiclient/discovery_cache/documents/sourcerepo.v1.json,sha256=GaV7S-XE7FFCeOlt7Q_2uWPpBV-Ve18B1JzW-5-yfWw,38294
+googleapiclient/discovery_cache/documents/spanner.v1.json,sha256=uf6tFHUwHI3jTtsn6x1hbTaJARaagKaP0C6acqXxssM,281137
+googleapiclient/discovery_cache/documents/speech.v1.json,sha256=0A6DVRbXDowpBcEOBEgDu7gPSKEUbvq26NzWejzO7-U,62992
+googleapiclient/discovery_cache/documents/speech.v1p1beta1.json,sha256=J10_k49TgIq-CvkviAHuWnQWQZmMr_0yelYX_XJsrIk,64146
+googleapiclient/discovery_cache/documents/speech.v2beta1.json,sha256=uKUcKcn6zNl_egbyhzSYNTlr_o2tpGelQ0WctfRAU54,18493
+googleapiclient/discovery_cache/documents/sqladmin.v1.json,sha256=xAalypterNcFlcavN74-LeYWTbJe_1gjCfpeO8sLYgo,170634
+googleapiclient/discovery_cache/documents/sqladmin.v1beta4.json,sha256=27jnjGEz7MHXVCumjkLfKDlLza_EO2pbFFc7bQqL4zU,171801
+googleapiclient/discovery_cache/documents/storage.v1.json,sha256=qcGjkbEisykZVI7C-PxiBAN3ywqDckNAidldHtumlw0,174941
+googleapiclient/discovery_cache/documents/storagetransfer.v1.json,sha256=qaL3zg7YMusVHh4QL8jbSVNlioDGyZEUB2O5brm722s,80154
+googleapiclient/discovery_cache/documents/streetviewpublish.v1.json,sha256=-LOHkSUfuiMiY_LjfX0OdPwUPxBQj9i1W6XB5Cwqwws,48368
+googleapiclient/discovery_cache/documents/sts.v1.json,sha256=JMcIKcg4G2cI4BCfDv1173C1tSknL0nk3Oa36pwOhTU,35625
+googleapiclient/discovery_cache/documents/sts.v1beta.json,sha256=1ziXUibUtEpmFbgiZrx3PKn1AJIPq8vDTPRwF5jENzM,26725
+googleapiclient/discovery_cache/documents/tagmanager.v1.json,sha256=pMF2JuHFmFqwaUhFKoZnq111hmhkbHCbAGdOLmiEtWE,90841
+googleapiclient/discovery_cache/documents/tagmanager.v2.json,sha256=zacWTzixEzLSjBp5LxISIzt0mKxQ7K9USVwPrE_XFno,181799
+googleapiclient/discovery_cache/documents/tasks.v1.json,sha256=dqyhz7_tWFOe82d2f6QQQgtVKdA6H8AaphJKuDDGniU,18017
+googleapiclient/discovery_cache/documents/testing.v1.json,sha256=bg95O8SDUPYaxV-JhfTcUwKLjjB93ogX305uU6ZuRQ0,94201
+googleapiclient/discovery_cache/documents/texttospeech.v1.json,sha256=8pRGK7VcI3Pgs0RISrnuE78ZdRMHZIs9CTj59IEwLck,24645
+googleapiclient/discovery_cache/documents/texttospeech.v1beta1.json,sha256=CEvkh4HQdNqWSXqfUOGDRPAwR9zdrqPFi0wGtkjnhq0,23486
+googleapiclient/discovery_cache/documents/toolresults.v1beta3.json,sha256=EeiGVFNKzewVZ1x-Z2UiFJ-KwotfDZ9v4qAOfO8OkIY,129516
+googleapiclient/discovery_cache/documents/tpu.v1.json,sha256=2YTQ9lFEDcGpq1SJdPoSxlJ8f8t4RsROqnTHgD4kps4,32111
+googleapiclient/discovery_cache/documents/tpu.v1alpha1.json,sha256=8OWS97VFyUnOHK-lMQVXyFEeREy_MIZ_dYdR-FyfWuo,32526
+googleapiclient/discovery_cache/documents/tpu.v2.json,sha256=LfQ7RxVjpY6ZFlAErgiFiaIRicRywZf13CYMCbuIjMg,39836
+googleapiclient/discovery_cache/documents/tpu.v2alpha1.json,sha256=u1X0OJELHq0Dn2W_i56e3wBeoaH4VMUX5G2zcOHbWtw,60467
+googleapiclient/discovery_cache/documents/trafficdirector.v2.json,sha256=PPs6kIYcjqkyWbBcY_PqeNF9md0n5t5LFHxj4_Y2v9Y,32631
+googleapiclient/discovery_cache/documents/trafficdirector.v3.json,sha256=Cz6NjXjZHqI2MypQnzZjkH0Nj6qMWUIU0zHI9JQ2CtM,48396
+googleapiclient/discovery_cache/documents/transcoder.v1.json,sha256=U77NSTcI6DM90Xdr69OE7uKX3_DZn31gQ56eioHU3wA,66420
+googleapiclient/discovery_cache/documents/transcoder.v1beta1.json,sha256=nj-9u_qqRm_1H3tlaTpWZ2AEkXlgI6uIzwTqj7DZQYg,60514
+googleapiclient/discovery_cache/documents/translate.v2.json,sha256=uIVhWVyniBow5R6_BxTdei59xpeUcu2hPy_fLyX8Jig,11984
+googleapiclient/discovery_cache/documents/translate.v3.json,sha256=frIa2qfoHh6Pmt_txKmZSIIh-JLM-QSBQ7Vp0OOpi9k,116367
+googleapiclient/discovery_cache/documents/translate.v3beta1.json,sha256=h8g8pVxn4e-2G1RctXd05zkBc45YkB-t7C3m86gbcxI,68419
+googleapiclient/discovery_cache/documents/travelimpactmodel.v1.json,sha256=x6q3pLLtPdqz6006Q8vkIeS7y-Bw4bFSjgpEwO8JTjQ,9908
+googleapiclient/discovery_cache/documents/vault.v1.json,sha256=sqCp52e92fLlSy-Y6BwH41BFQJRCzTUwqR-mh-n8c1o,74046
+googleapiclient/discovery_cache/documents/vectortile.v1.json,sha256=ox05W0ZLG7Yn-6uUQOP3yeXgibQfaQ6tqA6iv865DQg,47652
+googleapiclient/discovery_cache/documents/verifiedaccess.v1.json,sha256=jJ4asP0f8n-3V6sU6j1vHziinnKyK7XLweEj72TVd0c,6762
+googleapiclient/discovery_cache/documents/verifiedaccess.v2.json,sha256=LfoXVU29LmoNQruIHCabnwYW3AjhU5asYQ3uxrGHNGY,17180
+googleapiclient/discovery_cache/documents/versionhistory.v1.json,sha256=714Z8H1uQN70HnzkkJkh0itfJYnCwdHsaPOuJykX6-w,16566
+googleapiclient/discovery_cache/documents/videointelligence.v1.json,sha256=L60YHLIQg71o8JF9QHH1NpmX5GuQLmFFY0Jw9N9jFfY,177769
+googleapiclient/discovery_cache/documents/videointelligence.v1beta2.json,sha256=lXEgK2Uns8_t8aBdTOJosA-jQuxeRaie2TvydX89XWo,169935
+googleapiclient/discovery_cache/documents/videointelligence.v1p1beta1.json,sha256=udR0a8mqBUlEpj-64_o4ZfdYYqzZW_SHuqOLfQZWT1Q,170011
+googleapiclient/discovery_cache/documents/videointelligence.v1p2beta1.json,sha256=UtQ1k-AbngnnJp0f1ne25YyqsL_zDRiWmvkTdpm20jg,170011
+googleapiclient/discovery_cache/documents/videointelligence.v1p3beta1.json,sha256=uTsZzfLEQWrqCDC80ZClbkjxkwQ1MJvlmOBn6bNTHYo,170062
+googleapiclient/discovery_cache/documents/vision.v1.json,sha256=9cQz9Ib-nK7t5xdUFj6jxanMP1Qr0wHmXcdRucay3Tw,312498
+googleapiclient/discovery_cache/documents/vision.v1p1beta1.json,sha256=ADcy2_qeAkj-pKDQVyiqCs3tA35_qmNffRfRl21bv4s,277224
+googleapiclient/discovery_cache/documents/vision.v1p2beta1.json,sha256=ZUOvtaryXVRIfpOXCirV3lSH_MfEHlrpstUXeQKVbGs,277224
+googleapiclient/discovery_cache/documents/vmmigration.v1.json,sha256=B9i8-u7JmPrLbZwbIH-Lsn9CmLPjmrdElC3P-7oPUCI,158382
+googleapiclient/discovery_cache/documents/vmmigration.v1alpha1.json,sha256=FpD6YHsGbA_bejid7_-2L8DrXAhXqXwkXPfa75rZuYg,165846
+googleapiclient/discovery_cache/documents/vmwareengine.v1.json,sha256=Hy2__mJsT-fonI6WvrDop-NEhxqDkAd69TN29JxFRuU,252203
+googleapiclient/discovery_cache/documents/vpcaccess.v1.json,sha256=GH82dEAiwO4Y7ADohLJ7Wh-RLeoAVt-MAuXqQ1tp9Eg,21088
+googleapiclient/discovery_cache/documents/vpcaccess.v1beta1.json,sha256=9dFgZQhoUXer0_YbMJ6Cn3k38FOouCbdcbM4d_JIFws,21487
+googleapiclient/discovery_cache/documents/walletobjects.v1.json,sha256=AhGPp46eWIcxoT49ysqci-2Bf02TK3FuwkZex7Tp4mY,318229
+googleapiclient/discovery_cache/documents/webfonts.v1.json,sha256=nZ4QuMaCE9q2GyJqsOpTV_iax_wG7-7dOM0R8KxEpHI,6169
+googleapiclient/discovery_cache/documents/webmasters.v3.json,sha256=TOTCcpZyUk-dbmnjFVFoQam0hq2wFQg2UV9x-4fuVLY,20097
+googleapiclient/discovery_cache/documents/webrisk.v1.json,sha256=wtE5epsrc1_8XuhPRm7ID_ffvmDgh3eWibawK3qcohk,26586
+googleapiclient/discovery_cache/documents/websecurityscanner.v1.json,sha256=vb_AOFrcnssLP5g9tSixEC63m_uS1KcWkEGC3V67d-M,44164
+googleapiclient/discovery_cache/documents/websecurityscanner.v1alpha.json,sha256=Vp8osc6laibWGDEigzp84rc1bru0ShYVBzY3qfBMf1s,36660
+googleapiclient/discovery_cache/documents/websecurityscanner.v1beta.json,sha256=tMD2VypVUy-uz8_pvMNd4id-CUnQ5kxPOrMHe1Uyups,42878
+googleapiclient/discovery_cache/documents/workflowexecutions.v1.json,sha256=mH6BJltqctp42qLVLO-WmHFmE-WYHix2w96xpQ-TCFA,35338
+googleapiclient/discovery_cache/documents/workflowexecutions.v1beta.json,sha256=P43_XCtVkX2VA8xM5fmXCM2ZGU71UcP6CriyQYnHXmI,14318
+googleapiclient/discovery_cache/documents/workflows.v1.json,sha256=okmTsOxYRQdG6Jnim5ijMNLc6hv_73vAgpU46eU3fK4,27276
+googleapiclient/discovery_cache/documents/workflows.v1beta.json,sha256=1L3Wi6-lSQy13-bKf26-q2s0tVlHIzo20RqHsI_bFhM,22461
+googleapiclient/discovery_cache/documents/workloadmanager.v1.json,sha256=7AxGdLQIuvUKXCt01gHRCWH5rUjE36GxJ-XdP5PZsYo,52761
+googleapiclient/discovery_cache/documents/workspaceevents.v1.json,sha256=08_ZWpAcwjPA6AltLrRNrASDGuJjp7yOKZmcy6ihnMg,2822
+googleapiclient/discovery_cache/documents/workstations.v1.json,sha256=TQFfxJNJ5IfitFnZlAXQS5D2TWh8fwoudxUci5-46qo,94559
+googleapiclient/discovery_cache/documents/workstations.v1beta.json,sha256=CR9mrsYhq021arzQJ-lGaPk_aiuLGZ23fXzTWIoSQa0,94040
+googleapiclient/discovery_cache/documents/youtube.v3.json,sha256=MyBWskXxRSHdYi0rvUc-acctLwEudB3vWgjBiMJB9OU,367690
+googleapiclient/discovery_cache/documents/youtubeAnalytics.v1.json,sha256=KLPoKHO_XT4-LyIZjwSGh44f_htMwjePD2PXR8MH-YY,3220
+googleapiclient/discovery_cache/documents/youtubeAnalytics.v2.json,sha256=KnqRH5u1UJSBADuGYq3FvVWCLSLOtKK8CwsVHishqss,28888
+googleapiclient/discovery_cache/documents/youtubereporting.v1.json,sha256=uBkALwI1nSLqoGJeGUnd0x2Ug1s5J_ft033jG55-Tkw,23094
+googleapiclient/discovery_cache/file_cache.py,sha256=sim3Mg4HgRYo3vX75jvcKy_aV568EvIrtBfvfbw-044,4774
+googleapiclient/errors.py,sha256=9h3uimcMcczBHZJFWAX_YDABzJeJugWB0jmj11rp-LI,5460
+googleapiclient/http.py,sha256=ITE51oqDBqN1-AA5D-Tnlj3egGc_5O0V5xSzBw3UTKI,68241
+googleapiclient/mimeparse.py,sha256=wwouQMCjppTocJtiQhkkTa27kocYwlFRALL2z11Xo1Y,6530
+googleapiclient/model.py,sha256=qEsdqL3KuhQNGKIDxFzABxMNrejPIScDfJaScNNnffs,13322
+googleapiclient/sample_tools.py,sha256=hDBLJNwLluN6AGhaBR-3GXWDrOwRNPvXyseLOyz9l30,4315
+googleapiclient/schema.py,sha256=rR3u8WPQ_V8a7GCUsNuvtf6GxzwuMO0HaqsTBp3tnyM,10414
+googleapiclient/version.py,sha256=Tlrb7tL0HFnCyLn3_CBpBJoNBTLafsmFgEHCMjO9Yc8,599
diff --git a/Lib/site-packages/google_api_python_client-2.117.0.dist-info/REQUESTED b/Lib/site-packages/google_api_python_client-2.117.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/google_api_python_client-2.117.0.dist-info/WHEEL b/Lib/site-packages/google_api_python_client-2.117.0.dist-info/WHEEL
new file mode 100644
index 0000000..f31e450
--- /dev/null
+++ b/Lib/site-packages/google_api_python_client-2.117.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.3)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/Lib/site-packages/google_api_python_client-2.117.0.dist-info/top_level.txt b/Lib/site-packages/google_api_python_client-2.117.0.dist-info/top_level.txt
new file mode 100644
index 0000000..f907e7e
--- /dev/null
+++ b/Lib/site-packages/google_api_python_client-2.117.0.dist-info/top_level.txt
@@ -0,0 +1,3 @@
+apiclient
+googleapiclient
+googleapiclient/discovery_cache
diff --git a/Lib/site-packages/google_auth-2.27.0.dist-info/INSTALLER b/Lib/site-packages/google_auth-2.27.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/google_auth-2.27.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/google_auth-2.27.0.dist-info/LICENSE b/Lib/site-packages/google_auth-2.27.0.dist-info/LICENSE
new file mode 100644
index 0000000..261eeb9
--- /dev/null
+++ b/Lib/site-packages/google_auth-2.27.0.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/Lib/site-packages/google_auth-2.27.0.dist-info/METADATA b/Lib/site-packages/google_auth-2.27.0.dist-info/METADATA
new file mode 100644
index 0000000..48bac82
--- /dev/null
+++ b/Lib/site-packages/google_auth-2.27.0.dist-info/METADATA
@@ -0,0 +1,135 @@
+Metadata-Version: 2.1
+Name: google-auth
+Version: 2.27.0
+Summary: Google Authentication Library
+Home-page: https://github.com/googleapis/google-auth-library-python
+Author: Google Cloud Platform
+Author-email: googleapis-packages@google.com
+License: Apache 2.0
+Keywords: google auth oauth client
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=3.7
+License-File: LICENSE
+Requires-Dist: cachetools <6.0,>=2.0.0
+Requires-Dist: pyasn1-modules >=0.2.1
+Requires-Dist: rsa <5,>=3.1.4
+Provides-Extra: aiohttp
+Requires-Dist: aiohttp <4.0.0.dev0,>=3.6.2 ; extra == 'aiohttp'
+Requires-Dist: requests <3.0.0.dev0,>=2.20.0 ; extra == 'aiohttp'
+Provides-Extra: enterprise_cert
+Requires-Dist: cryptography ==36.0.2 ; extra == 'enterprise_cert'
+Requires-Dist: pyopenssl ==22.0.0 ; extra == 'enterprise_cert'
+Provides-Extra: pyopenssl
+Requires-Dist: pyopenssl >=20.0.0 ; extra == 'pyopenssl'
+Requires-Dist: cryptography >=38.0.3 ; extra == 'pyopenssl'
+Provides-Extra: reauth
+Requires-Dist: pyu2f >=0.1.5 ; extra == 'reauth'
+Provides-Extra: requests
+Requires-Dist: requests <3.0.0.dev0,>=2.20.0 ; extra == 'requests'
+
+Google Auth Python Library
+==========================
+
+|pypi|
+
+This library simplifies using Google's various server-to-server authentication
+mechanisms to access Google APIs.
+
+.. |pypi| image:: https://img.shields.io/pypi/v/google-auth.svg
+ :target: https://pypi.python.org/pypi/google-auth
+
+Installing
+----------
+
+You can install using `pip`_::
+
+ $ pip install google-auth
+
+.. _pip: https://pip.pypa.io/en/stable/
+
+For more information on setting up your Python development environment, please refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform.
+
+.. _`Python Development Environment Setup Guide`: https://cloud.google.com/python/docs/setup
+
+Extras
+------
+
+google-auth has a few extras that you can install. For example::
+
+ $ pip install google-auth[pyopenssl]
+
+Note that the extras pyopenssl and enterprise_cert should not be used together because they use conflicting versions of `cryptography`_.
+
+.. _`cryptography`: https://cryptography.io/en/latest/
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Python >= 3.7
+
+**NOTE**:
+Python 3.7 was marked as `unsupported`_ by the Python community in June 2023.
+We recommend that all developers upgrade to Python 3.8 or newer as soon as
+they can. Support for Python 3.7 will be removed from this library after
+January 1, 2024. Previous releases that support Python 3.7 will continue to
+be available for download, but releases after January 1, 2024 will only
+target Python 3.8 and newer.
+
+.. _unsupported: https://devguide.python.org/versions/#unsupported-versions
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+- Python 2.7: The last version of this library with support for Python 2.7
+ was `google.auth == 1.34.0`.
+
+- Python 3.5: The last version of this library with support for Python 3.5
+ was `google.auth == 1.23.0`.
+
+- Python 3.6: The last version of this library with support for Python 3.6
+ was `google.auth == 2.22.0`.
+
+Documentation
+-------------
+
+The Google Auth Python Library has usage and reference documentation at https://googleapis.dev/python/google-auth/latest/index.html.
+
+Current Maintainers
+-------------------
+- googleapis-auth@google.com
+
+Authors
+-------
+
+- `@theacodes <https://github.com/theacodes>`_ (Thea Flowers)
+- `@dhermes <https://github.com/dhermes>`_ (Danny Hermes)
+- `@lukesneeringer <https://github.com/lukesneeringer>`_ (Luke Sneeringer)
+- `@busunkim96 <https://github.com/busunkim96>`_ (Bu Sun Kim)
+
+Contributing
+------------
+
+Contributions to this library are always welcome and highly encouraged.
+
+See `CONTRIBUTING.rst`_ for more information on how to get started.
+
+.. _CONTRIBUTING.rst: https://github.com/googleapis/google-auth-library-python/blob/main/CONTRIBUTING.rst
+
+License
+-------
+
+Apache 2.0 - See `the LICENSE`_ for more information.
+
+.. _the LICENSE: https://github.com/googleapis/google-auth-library-python/blob/main/LICENSE
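
The README in the METADATA above describes server-to-server authentication but stops at the install commands, so here is a minimal sketch of that flow. It assumes a service-account key file named ``service-account.json`` and the ``cloud-platform`` scope; both are illustrative placeholders, not values taken from this diff:

```python
# Minimal sketch of google-auth's service-account flow.
# "service-account.json" and the scope below are assumed placeholders.
from google.oauth2 import service_account
from google.auth.transport.requests import Request

credentials = service_account.Credentials.from_service_account_file(
    "service-account.json",
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
)

# Exchange the signed JWT for an access token.
credentials.refresh(Request())
print(credentials.token)
```

Note that the ``Request`` transport used here lives in ``google.auth.transport.requests`` and therefore needs the ``requests`` extra from the Extras section above (``pip install google-auth[requests]``).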
diff --git a/Lib/site-packages/google_auth-2.27.0.dist-info/RECORD b/Lib/site-packages/google_auth-2.27.0.dist-info/RECORD
new file mode 100644
index 0000000..1585d2d
--- /dev/null
+++ b/Lib/site-packages/google_auth-2.27.0.dist-info/RECORD
@@ -0,0 +1,129 @@
+google/auth/__init__.py,sha256=wg5MWwRk8nfJFMmUMU2gLirrPdGe9NMwqLkdSwdFwE8,1639
+google/auth/__pycache__/__init__.cpython-312.pyc,,
+google/auth/__pycache__/_cloud_sdk.cpython-312.pyc,,
+google/auth/__pycache__/_credentials_async.cpython-312.pyc,,
+google/auth/__pycache__/_default.cpython-312.pyc,,
+google/auth/__pycache__/_default_async.cpython-312.pyc,,
+google/auth/__pycache__/_exponential_backoff.cpython-312.pyc,,
+google/auth/__pycache__/_helpers.cpython-312.pyc,,
+google/auth/__pycache__/_jwt_async.cpython-312.pyc,,
+google/auth/__pycache__/_oauth2client.cpython-312.pyc,,
+google/auth/__pycache__/_refresh_worker.cpython-312.pyc,,
+google/auth/__pycache__/_service_account_info.cpython-312.pyc,,
+google/auth/__pycache__/api_key.cpython-312.pyc,,
+google/auth/__pycache__/app_engine.cpython-312.pyc,,
+google/auth/__pycache__/aws.cpython-312.pyc,,
+google/auth/__pycache__/credentials.cpython-312.pyc,,
+google/auth/__pycache__/downscoped.cpython-312.pyc,,
+google/auth/__pycache__/environment_vars.cpython-312.pyc,,
+google/auth/__pycache__/exceptions.cpython-312.pyc,,
+google/auth/__pycache__/external_account.cpython-312.pyc,,
+google/auth/__pycache__/external_account_authorized_user.cpython-312.pyc,,
+google/auth/__pycache__/iam.cpython-312.pyc,,
+google/auth/__pycache__/identity_pool.cpython-312.pyc,,
+google/auth/__pycache__/impersonated_credentials.cpython-312.pyc,,
+google/auth/__pycache__/jwt.cpython-312.pyc,,
+google/auth/__pycache__/metrics.cpython-312.pyc,,
+google/auth/__pycache__/pluggable.cpython-312.pyc,,
+google/auth/__pycache__/version.cpython-312.pyc,,
+google/auth/_cloud_sdk.py,sha256=u7tbE3KdHBCzZK8ka47xG3CHHtF0DhFDjmPSgz8lwXg,5212
+google/auth/_credentials_async.py,sha256=bHB28wMULOIEMmYqKEOU06A4co7uIXPcnfVC_TaA6KY,6802
+google/auth/_default.py,sha256=gULLKwjyw_JP_zgab9YkLww4QATthRjo13newNZ0Zgk,28538
+google/auth/_default_async.py,sha256=r4bFozWfioQa4lIEC-psuRsLiVhnJbuW-uQ0daj7s3Q,11575
+google/auth/_exponential_backoff.py,sha256=YEuEn2SzAvoPoesLnGdAKwLKipsdnQQl-R0Qh7v64T8,3903
+google/auth/_helpers.py,sha256=7Zm-uwaZWyFb3fE2JQA-sJUTSBvltTgZkNkMGc7raRo,8236
+google/auth/_jwt_async.py,sha256=5mGab5CkdnBMkQkS4mtNkwFkktp1jBw6G1sYQk8bYKY,5972
+google/auth/_oauth2client.py,sha256=hPxcl_8q6Oxr0hOHPUWaWObxI85Pv-0q6kZhRUrT5oY,5855
+google/auth/_refresh_worker.py,sha256=QSMOpSQnCuIYJjCCGBdorxGrhU8kN0AxATiPo5k__mI,3374
+google/auth/_service_account_info.py,sha256=KGruc_OxS7O7_EADD4JEIjjz_-5Xa1_rlgk1t0p1nvk,2816
+google/auth/api_key.py,sha256=PeieTYceHJIFCo0zQo1EA9NEDL_Ie6S78qmD-6Ig17s,2583
+google/auth/app_engine.py,sha256=LuEaoWM1UwcIUJ6OrLza0tTpqJBXbtzZ3XjN0C-6Wvk,6121
+google/auth/aws.py,sha256=sWrleoR9WlLsCJKczNlZQttnmaR0cIJfEnBlMsoubrY,30480
+google/auth/compute_engine/__init__.py,sha256=BqeTka-oyHFATkys3SGKRlOyWQ8mVV0vVaP2hOwV4Qw,910
+google/auth/compute_engine/__pycache__/__init__.cpython-312.pyc,,
+google/auth/compute_engine/__pycache__/_metadata.cpython-312.pyc,,
+google/auth/compute_engine/__pycache__/credentials.cpython-312.pyc,,
+google/auth/compute_engine/_metadata.py,sha256=YrLNpFUYV7Jq8m6eizqpqR7zwCjxjQuouSk3AsqOg44,12001
+google/auth/compute_engine/credentials.py,sha256=NsBaxSwjEdq0gNxmUIbuYYH8-T8dqwaqDU5pz4UdcRA,18820
+google/auth/credentials.py,sha256=WFHa26v6oenfVBg8BLz3zZBQuQ9FE-ZHksnDTqqvHbc,18465
+google/auth/crypt/__init__.py,sha256=xxBMOPuzD-XOxPvzkleLa2oj4u-9FSjnFmUN3PBk00s,3324
+google/auth/crypt/__pycache__/__init__.cpython-312.pyc,,
+google/auth/crypt/__pycache__/_cryptography_rsa.cpython-312.pyc,,
+google/auth/crypt/__pycache__/_helpers.cpython-312.pyc,,
+google/auth/crypt/__pycache__/_python_rsa.cpython-312.pyc,,
+google/auth/crypt/__pycache__/base.cpython-312.pyc,,
+google/auth/crypt/__pycache__/es256.cpython-312.pyc,,
+google/auth/crypt/__pycache__/rsa.cpython-312.pyc,,
+google/auth/crypt/_cryptography_rsa.py,sha256=o2QTRkfDRLtEBiq-fbpbTWypvxaxUDwzlx2NpXG9o0w,5158
+google/auth/crypt/_helpers.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/auth/crypt/_python_rsa.py,sha256=L0kgvPXVdEGhajzxLRIgdk2T9rY0nlThrSlSz8Hc8XY,6123
+google/auth/crypt/base.py,sha256=3CJrnQsppR6h-WnTRqSrt1hPEQVkHcwNIVJ_B5M00hY,4190
+google/auth/crypt/es256.py,sha256=hXyeia3g6_TZf-UYdZVzFKgbPrLlgSuR1mMvYKSYqbg,6251
+google/auth/crypt/rsa.py,sha256=QI17aKQsX3gdbJkBef-zsm-X_YBjBollCaoA65Am-WI,1109
+google/auth/downscoped.py,sha256=tg4kQDQZJrMo59rv8VwmGvYLjtnhuvYSMfCfufkBXl4,21467
+google/auth/environment_vars.py,sha256=ML9aFh5gwRStBDjn8BQSQHyWsA-OcMn-RB5FRMM8qOw,3297
+google/auth/exceptions.py,sha256=OeM4KgN0GVHUq8XTfagIwXqPpYXc4zvtK9Eptw8bi50,2962
+google/auth/external_account.py,sha256=y9Bh1rD5nJx9krpwHoerSujruztg6wZW0jO1sMEFpeo,22540
+google/auth/external_account_authorized_user.py,sha256=uC3CG-ZLBXhveUF8-pxUrxAGXlG4hh6NqA0KIUkbm1A,13543
+google/auth/iam.py,sha256=-Pij0ZsUwU43rn7f9uWsYCss8p_4ER1oJu437mamWUI,3651
+google/auth/identity_pool.py,sha256=UUvdjBntr1J3Eu57rOrNy_tBjhrOd9tp-o2cmlIB8GM,10666
+google/auth/impersonated_credentials.py,sha256=BiSYO_F3UvEIsM9z-J1onX0ubMDW4RKasHktnICuku4,16975
+google/auth/jwt.py,sha256=1m_arp5x-4I5UTDaK9y50PSlKnUhnSKFyJW95k3-7cQ,31096
+google/auth/metrics.py,sha256=wx3m95QQCF885wYvPL4T01CHOdCBN5JvFCtbOakd98Q,5614
+google/auth/pluggable.py,sha256=iCqevwmAs8MJ0OdlxnlFFmHDxthNpqVIQXTKjnTyN8E,17306
+google/auth/transport/__init__.py,sha256=Bc9Tx4qozvP1g1USD8pvk7NEVE6hgXrpO-FH2WQFkkU,3621
+google/auth/transport/__pycache__/__init__.cpython-312.pyc,,
+google/auth/transport/__pycache__/_aiohttp_requests.cpython-312.pyc,,
+google/auth/transport/__pycache__/_custom_tls_signer.cpython-312.pyc,,
+google/auth/transport/__pycache__/_http_client.cpython-312.pyc,,
+google/auth/transport/__pycache__/_mtls_helper.cpython-312.pyc,,
+google/auth/transport/__pycache__/grpc.cpython-312.pyc,,
+google/auth/transport/__pycache__/mtls.cpython-312.pyc,,
+google/auth/transport/__pycache__/requests.cpython-312.pyc,,
+google/auth/transport/__pycache__/urllib3.cpython-312.pyc,,
+google/auth/transport/_aiohttp_requests.py,sha256=OWl0SAygFICdOp33rpauBlcmeo0z4N64p4-GBMyjmOc,14554
+google/auth/transport/_custom_tls_signer.py,sha256=E0bfybQJanDnfq9nfcY8d0k2vxFMOV0s05h1GYlEd9w,9680
+google/auth/transport/_http_client.py,sha256=j7Amhmg-ipWztX3W5zOs2kJUFFuGCs0PhI7ZMAvle50,3706
+google/auth/transport/_mtls_helper.py,sha256=cClydxTEeXZXU8Z36LsmX4VYUjaBKjqZoAqiBrydDhw,9062
+google/auth/transport/grpc.py,sha256=59vVJbmvzpW61z0h1Z5xQa3ZMzCu4G3wLYQrAtHFpAk,13937
+google/auth/transport/mtls.py,sha256=-QZN3xe9Jb7OGQ_4Qv_GtyZuRvbRGPbBZEdRjgdqFWg,3793
+google/auth/transport/requests.py,sha256=L0xLVrPaJvDJqqtGGauLRFCK5jXdBKAv29kM1e80h0M,22707
+google/auth/transport/urllib3.py,sha256=cQITCEXvu375iIP749bG5p96Sf2EVBsq63EbKRYkhTA,16049
+google/auth/version.py,sha256=MPKolXQiMK5unwc0-eaReJdA77R-82Z77xpR_wnUjCQ,598
+google/oauth2/__init__.py,sha256=IdFKxhIzlqNIalPgeB2P5hP6KkoxcpNk61hp7P2B85w,1196
+google/oauth2/__pycache__/__init__.cpython-312.pyc,,
+google/oauth2/__pycache__/_client.cpython-312.pyc,,
+google/oauth2/__pycache__/_client_async.cpython-312.pyc,,
+google/oauth2/__pycache__/_credentials_async.cpython-312.pyc,,
+google/oauth2/__pycache__/_id_token_async.cpython-312.pyc,,
+google/oauth2/__pycache__/_reauth_async.cpython-312.pyc,,
+google/oauth2/__pycache__/_service_account_async.cpython-312.pyc,,
+google/oauth2/__pycache__/challenges.cpython-312.pyc,,
+google/oauth2/__pycache__/credentials.cpython-312.pyc,,
+google/oauth2/__pycache__/gdch_credentials.cpython-312.pyc,,
+google/oauth2/__pycache__/id_token.cpython-312.pyc,,
+google/oauth2/__pycache__/reauth.cpython-312.pyc,,
+google/oauth2/__pycache__/service_account.cpython-312.pyc,,
+google/oauth2/__pycache__/sts.cpython-312.pyc,,
+google/oauth2/__pycache__/utils.cpython-312.pyc,,
+google/oauth2/_client.py,sha256=muJCQR_9FA3RpngNnpaAeaLfIwDFz-9X_udMDj0CrCc,17533
+google/oauth2/_client_async.py,sha256=CscoOhgVni9P6Xxy8a2BPGf_spxz_VAloIKvJqe6xfk,10480
+google/oauth2/_credentials_async.py,sha256=hUrucQkcYuYlyCdHMci8tzaVncnjQlFc2sAfNu5Dt8k,4474
+google/oauth2/_id_token_async.py,sha256=o_DViJoWMGlL3zwTbW2unGDBfY569D_VMB4l7bx-Qpw,10115
+google/oauth2/_reauth_async.py,sha256=C6k3f4T0aoVWItl8shYjOl5ngaoTJw3zKVhqHAeBXU0,11696
+google/oauth2/_service_account_async.py,sha256=5-HBGWoHhbWpCRbd34YiopQepEsEf8gSiuMlSm5hN84,5131
+google/oauth2/challenges.py,sha256=kGzZiSdQbvRFxnZnqHAlSOOcoEKNfatO1euJo40CAmY,7171
+google/oauth2/credentials.py,sha256=X_fCJPxwdyeRGj4FA7ApSYrg6c8Ad_69JCMp5yP_34w,25785
+google/oauth2/gdch_credentials.py,sha256=CY6iPnPuc2OCIe1Zujwg1Mu9QSl1iGJqGOy6TkUleHw,9007
+google/oauth2/id_token.py,sha256=XxUfOR4Pb8QnLizNhwV9QB-JvEU7lFGoC3NWcqTvKgI,12067
+google/oauth2/reauth.py,sha256=neMOsDBYoDwJJuoaG7tKUZEb9NmzLVRQqWrwWwya2DA,12790
+google/oauth2/service_account.py,sha256=72ssPVU_izSORqstvsAUx_4DoX1e01lkS2ucUDG9UV0,31365
+google/oauth2/sts.py,sha256=GjpFEvByl3EzyGt2v1kev6rvP7_uSQ3eTlpBK9vUhSc,6699
+google/oauth2/utils.py,sha256=4crAdpKbDtobpQfXJc3uF6Zm6F3IzffvRSo-9h_515w,6315
+google_auth-2.27.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_auth-2.27.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+google_auth-2.27.0.dist-info/METADATA,sha256=8CkUICmlRvw5h-Wz1qu9Xnt8pabXxFmNzPwPj6eyI0c,4690
+google_auth-2.27.0.dist-info/RECORD,,
+google_auth-2.27.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google_auth-2.27.0.dist-info/WHEEL,sha256=P2T-6epvtXQ2cBOE_U1K4_noqlJFN3tj15djMgEu4NM,110
+google_auth-2.27.0.dist-info/top_level.txt,sha256=BWmDiI8eoKfseZ5-MI2AW66GLJLNH4Lz23AXXTrIlyQ,23
diff --git a/Lib/site-packages/google_auth-2.27.0.dist-info/REQUESTED b/Lib/site-packages/google_auth-2.27.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/google_auth-2.27.0.dist-info/WHEEL b/Lib/site-packages/google_auth-2.27.0.dist-info/WHEEL
new file mode 100644
index 0000000..f31e450
--- /dev/null
+++ b/Lib/site-packages/google_auth-2.27.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.3)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/Lib/site-packages/google_auth-2.27.0.dist-info/top_level.txt b/Lib/site-packages/google_auth-2.27.0.dist-info/top_level.txt
new file mode 100644
index 0000000..64f26a3
--- /dev/null
+++ b/Lib/site-packages/google_auth-2.27.0.dist-info/top_level.txt
@@ -0,0 +1,3 @@
+google
+scripts
+testing
diff --git a/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/INSTALLER b/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/LICENSE b/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/LICENSE
new file mode 100644
index 0000000..261eeb9
--- /dev/null
+++ b/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/METADATA b/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/METADATA
new file mode 100644
index 0000000..cdf15e5
--- /dev/null
+++ b/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/METADATA
@@ -0,0 +1,62 @@
+Metadata-Version: 2.1
+Name: google-auth-httplib2
+Version: 0.2.0
+Summary: Google Authentication Library: httplib2 transport
+Home-page: https://github.com/GoogleCloudPlatform/google-auth-library-python-httplib2
+Author: Google Cloud Platform
+Author-email: googleapis-packages@google.com
+License: Apache 2.0
+Keywords: google auth oauth client
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet :: WWW/HTTP
+License-File: LICENSE
+Requires-Dist: google-auth
+Requires-Dist: httplib2 >=0.19.0
+
+``httplib2`` Transport for Google Auth
+======================================
+
+|pypi|
+
+This library provides an `httplib2`_ transport for `google-auth`_.
+
+.. note:: ``httplib2`` has lots of problems such as lack of threadsafety
+ and insecure usage of TLS. Using it is highly discouraged. This
+ library is intended to help existing users of ``oauth2client`` migrate to
+ ``google-auth``.
+
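+A minimal usage sketch (assuming ``credentials`` is an existing
+``google.auth.credentials.Credentials`` instance; the request URL is
+illustrative)::
+
+    import google_auth_httplib2
+    import httplib2
+
+    # Wrap httplib2.Http so each request carries, and can refresh,
+    # the credentials.
+    authed_http = google_auth_httplib2.AuthorizedHttp(
+        credentials, http=httplib2.Http())
+
+    response, content = authed_http.request(
+        'https://www.googleapis.com/storage/v1/b?project=my-project')
+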
+.. |pypi| image:: https://img.shields.io/pypi/v/google-auth-httplib2.svg
+ :target: https://pypi.python.org/pypi/google-auth-httplib2
+
+.. _httplib2: https://github.com/httplib2/httplib2
+.. _google-auth: https://github.com/GoogleCloudPlatform/google-auth-library-python/
+
+Installing
+----------
+
+You can install using `pip`_::
+
+ $ pip install google-auth-httplib2
+
+.. _pip: https://pip.pypa.io/en/stable/
+
+License
+-------
+
+Apache 2.0 - See `the LICENSE`_ for more information.
+
+.. _the LICENSE: https://github.com/GoogleCloudPlatform/google-auth-library-python/blob/main/LICENSE
diff --git a/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/RECORD b/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/RECORD
new file mode 100644
index 0000000..59521a7
--- /dev/null
+++ b/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/RECORD
@@ -0,0 +1,9 @@
+__pycache__/google_auth_httplib2.cpython-312.pyc,,
+google_auth_httplib2-0.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_auth_httplib2-0.2.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+google_auth_httplib2-0.2.0.dist-info/METADATA,sha256=KbbX6r2o-hWv_6Mr3PkYxa96q59OBXu2mF9WJ8MMlJk,2179
+google_auth_httplib2-0.2.0.dist-info/RECORD,,
+google_auth_httplib2-0.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google_auth_httplib2-0.2.0.dist-info/WHEEL,sha256=P2T-6epvtXQ2cBOE_U1K4_noqlJFN3tj15djMgEu4NM,110
+google_auth_httplib2-0.2.0.dist-info/top_level.txt,sha256=xQr4X91CsNWr1mw3rrOH8mKnYLOW_Uhr5U7moYxkq4E,21
+google_auth_httplib2.py,sha256=Z-VdVWlB8Rcrwn4Q2MU9SHHJ5HZkPYHfDu8xHKsBeQI,10211
diff --git a/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/REQUESTED b/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/WHEEL b/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/WHEEL
new file mode 100644
index 0000000..f31e450
--- /dev/null
+++ b/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.3)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/top_level.txt b/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/top_level.txt
new file mode 100644
index 0000000..f8b63c2
--- /dev/null
+++ b/Lib/site-packages/google_auth_httplib2-0.2.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+google_auth_httplib2
diff --git a/Lib/site-packages/google_auth_httplib2.py b/Lib/site-packages/google_auth_httplib2.py
new file mode 100644
index 0000000..863ae64
--- /dev/null
+++ b/Lib/site-packages/google_auth_httplib2.py
@@ -0,0 +1,307 @@
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport adapter for httplib2."""
+
+from __future__ import absolute_import
+
+import http.client
+import logging
+
+from google.auth import exceptions
+from google.auth import transport
+import httplib2
+
+
+_LOGGER = logging.getLogger(__name__)
+# Properties present in file-like streams / buffers.
+_STREAM_PROPERTIES = ("read", "seek", "tell")
+
+
+class _Response(transport.Response):
+ """httplib2 transport response adapter.
+
+ Args:
+ response (httplib2.Response): The raw httplib2 response.
+ data (bytes): The response body.
+ """
+
+ def __init__(self, response, data):
+ self._response = response
+ self._data = data
+
+ @property
+ def status(self):
+ """int: The HTTP status code."""
+ return self._response.status
+
+ @property
+ def headers(self):
+ """Mapping[str, str]: The HTTP response headers."""
+ return dict(self._response)
+
+ @property
+ def data(self):
+ """bytes: The response body."""
+ return self._data
+
+
+class Request(transport.Request):
+ """httplib2 request adapter.
+
+ This class is used internally for making requests using various transports
+ in a consistent way. If you use :class:`AuthorizedHttp` you do not need
+ to construct or use this class directly.
+
+ This class can be useful if you want to manually refresh a
+ :class:`~google.auth.credentials.Credentials` instance::
+
+ import google_auth_httplib2
+ import httplib2
+
+ http = httplib2.Http()
+ request = google_auth_httplib2.Request(http)
+
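+        # `credentials` is any google.auth.credentials.Credentials
+        # instance obtained elsewhere (for example, loaded from a file).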
+ credentials.refresh(request)
+
+ Args:
+ http (httplib2.Http): The underlying http object to use to make
+ requests.
+
+ .. automethod:: __call__
+ """
+
+ def __init__(self, http):
+ self.http = http
+
+ def __call__(
+ self, url, method="GET", body=None, headers=None, timeout=None, **kwargs
+ ):
+ """Make an HTTP request using httplib2.
+
+ Args:
+ url (str): The URI to be requested.
+ method (str): The HTTP method to use for the request. Defaults
+ to 'GET'.
+ body (bytes): The payload / body in HTTP request.
+ headers (Mapping[str, str]): Request headers.
+ timeout (Optional[int]): The number of seconds to wait for a
+ response from the server. This is ignored by httplib2 and will
+ issue a warning.
+            kwargs: Additional arguments passed through to the underlying
+ :meth:`httplib2.Http.request` method.
+
+ Returns:
+ google.auth.transport.Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TransportError: If any exception occurred.
+ """
+ if timeout is not None:
+ _LOGGER.warning(
+ "httplib2 transport does not support per-request timeout. "
+ "Set the timeout when constructing the httplib2.Http instance."
+ )
+
+ try:
+ _LOGGER.debug("Making request: %s %s", method, url)
+ response, data = self.http.request(
+ url, method=method, body=body, headers=headers, **kwargs
+ )
+ return _Response(response, data)
+ # httplib2 should catch the lower http error, this is a bug and
+ # needs to be fixed there. Catch the error for the meanwhile.
+ except (httplib2.HttpLib2Error, http.client.HTTPException) as exc:
+ raise exceptions.TransportError(exc)
+
+
+def _make_default_http():
+ """Returns a default httplib2.Http instance."""
+ return httplib2.Http()
+
+
+class AuthorizedHttp(object):
+ """A httplib2 HTTP class with credentials.
+
+ This class is used to perform requests to API endpoints that require
+ authorization::
+
+        from google_auth_httplib2 import AuthorizedHttp
+
+ authed_http = AuthorizedHttp(credentials)
+
+ response = authed_http.request(
+ 'https://www.googleapis.com/storage/v1/b')
+
+ This class implements :meth:`request` in the same way as
+ :class:`httplib2.Http` and can usually be used just like any other
+    instance of :class:`httplib2.Http`.
+
+ The underlying :meth:`request` implementation handles adding the
+ credentials' headers to the request and refreshing credentials as needed.
+ """
+
+ def __init__(
+ self,
+ credentials,
+ http=None,
+ refresh_status_codes=transport.DEFAULT_REFRESH_STATUS_CODES,
+ max_refresh_attempts=transport.DEFAULT_MAX_REFRESH_ATTEMPTS,
+ ):
+ """
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials
+ to add to the request.
+ http (httplib2.Http): The underlying HTTP object to
+ use to make requests. If not specified, a
+ :class:`httplib2.Http` instance will be constructed.
+ refresh_status_codes (Sequence[int]): Which HTTP status codes
+ indicate that credentials should be refreshed and the request
+ should be retried.
+ max_refresh_attempts (int): The maximum number of times to attempt
+ to refresh the credentials and retry the request.
+ """
+
+ if http is None:
+ http = _make_default_http()
+
+ self.http = http
+ self.credentials = credentials
+ self._refresh_status_codes = refresh_status_codes
+ self._max_refresh_attempts = max_refresh_attempts
+ # Request instance used by internal methods (for example,
+ # credentials.refresh).
+ self._request = Request(self.http)
+
+ def close(self):
+ """Calls httplib2's Http.close"""
+ self.http.close()
+
+ def request(
+ self,
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+ connection_type=None,
+ **kwargs
+ ):
+ """Implementation of httplib2's Http.request."""
+
+ _credential_refresh_attempt = kwargs.pop("_credential_refresh_attempt", 0)
+
+ # Make a copy of the headers. They will be modified by the credentials
+ # and we want to pass the original headers if we recurse.
+ request_headers = headers.copy() if headers is not None else {}
+
+ self.credentials.before_request(self._request, method, uri, request_headers)
+
+ # Check if the body is a file-like stream, and if so, save the body
+ # stream position so that it can be restored in case of refresh.
+ body_stream_position = None
+ if all(getattr(body, stream_prop, None) for stream_prop in _STREAM_PROPERTIES):
+ body_stream_position = body.tell()
+
+ # Make the request.
+ response, content = self.http.request(
+ uri,
+ method,
+ body=body,
+ headers=request_headers,
+ redirections=redirections,
+ connection_type=connection_type,
+ **kwargs
+ )
+
+ # If the response indicated that the credentials needed to be
+ # refreshed, then refresh the credentials and re-attempt the
+ # request.
+ # A stored token may expire between the time it is retrieved and
+ # the time the request is made, so we may need to try twice.
+ if (
+ response.status in self._refresh_status_codes
+ and _credential_refresh_attempt < self._max_refresh_attempts
+ ):
+
+ _LOGGER.info(
+ "Refreshing credentials due to a %s response. Attempt %s/%s.",
+ response.status,
+ _credential_refresh_attempt + 1,
+ self._max_refresh_attempts,
+ )
+
+ self.credentials.refresh(self._request)
+
+ # Restore the body's stream position if needed.
+ if body_stream_position is not None:
+ body.seek(body_stream_position)
+
+ # Recurse. Pass in the original headers, not our modified set.
+ return self.request(
+ uri,
+ method,
+ body=body,
+ headers=headers,
+ redirections=redirections,
+ connection_type=connection_type,
+ _credential_refresh_attempt=_credential_refresh_attempt + 1,
+ **kwargs
+ )
+
+ return response, content
+
+ def add_certificate(self, key, cert, domain, password=None):
+ """Proxy to httplib2.Http.add_certificate."""
+ self.http.add_certificate(key, cert, domain, password=password)
+
+ @property
+ def connections(self):
+ """Proxy to httplib2.Http.connections."""
+ return self.http.connections
+
+ @connections.setter
+ def connections(self, value):
+ """Proxy to httplib2.Http.connections."""
+ self.http.connections = value
+
+ @property
+ def follow_redirects(self):
+ """Proxy to httplib2.Http.follow_redirects."""
+ return self.http.follow_redirects
+
+ @follow_redirects.setter
+ def follow_redirects(self, value):
+ """Proxy to httplib2.Http.follow_redirects."""
+ self.http.follow_redirects = value
+
+ @property
+ def timeout(self):
+ """Proxy to httplib2.Http.timeout."""
+ return self.http.timeout
+
+ @timeout.setter
+ def timeout(self, value):
+ """Proxy to httplib2.Http.timeout."""
+ self.http.timeout = value
+
+ @property
+ def redirect_codes(self):
+ """Proxy to httplib2.Http.redirect_codes."""
+ return self.http.redirect_codes
+
+ @redirect_codes.setter
+ def redirect_codes(self, value):
+ """Proxy to httplib2.Http.redirect_codes."""
+ self.http.redirect_codes = value
diff --git a/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/INSTALLER b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/LICENSE b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/METADATA b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/METADATA
new file mode 100644
index 0000000..99875cd
--- /dev/null
+++ b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/METADATA
@@ -0,0 +1,82 @@
+Metadata-Version: 2.1
+Name: google-auth-oauthlib
+Version: 1.2.0
+Summary: Google Authentication Library
+Home-page: https://github.com/GoogleCloudPlatform/google-auth-library-python-oauthlib
+Author: Google Cloud Platform
+Author-email: googleapis-packages@google.com
+License: Apache 2.0
+Keywords: google auth oauth client oauthlib
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=3.6
+License-File: LICENSE
+Requires-Dist: google-auth >=2.15.0
+Requires-Dist: requests-oauthlib >=0.7.0
+Provides-Extra: tool
+Requires-Dist: click >=6.0.0 ; extra == 'tool'
+
+oauthlib integration for Google Auth
+====================================
+
+|pypi|
+
+This library provides `oauthlib`_ integration with `google-auth`_.
+
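+A minimal sketch of the typical installed-app flow (the secrets file name
+and scope below are placeholders)::
+
+    from google_auth_oauthlib.flow import InstalledAppFlow
+
+    flow = InstalledAppFlow.from_client_secrets_file(
+        'client_secrets.json',
+        scopes=['https://www.googleapis.com/auth/userinfo.email'])
+
+    credentials = flow.run_local_server()
+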
+.. |build| image:: https://travis-ci.org/googleapis/google-auth-library-python-oauthlib.svg?branch=main
+ :target: https://googleapis.dev/python/google-auth-oauthlib/latest/index.html
+.. |pypi| image:: https://img.shields.io/pypi/v/google-auth-oauthlib.svg
+ :target: https://pypi.python.org/pypi/google-auth-oauthlib
+
+.. _oauthlib: https://github.com/idan/oauthlib
+.. _google-auth: https://github.com/googleapis/google-auth-library-python
+
+Installing
+----------
+
+You can install using `pip`_::
+
+ $ pip install google-auth-oauthlib
+
+.. _pip: https://pip.pypa.io/en/stable/
+
+Documentation
+-------------
+
+The latest documentation is available at `google-auth-oauthlib.googleapis.dev`_.
+
+.. _google-auth-oauthlib.googleapis.dev: https://googleapis.dev/python/google-auth-oauthlib/latest/index.html
+
+Supported Python Versions
+-------------------------
+Python >= 3.6
+
+
+Unsupported Python Versions
+---------------------------
+
+Python == 2.7, Python == 3.5.
+
+The last version of this library compatible with Python 2.7 and 3.5 is
+`google-auth-oauthlib==0.4.1`.
+
+License
+-------
+
+Apache 2.0 - See `the LICENSE`_ for more information.
+
+.. _the LICENSE: https://github.com/googleapis/google-auth-library-python-oauthlib/blob/main/LICENSE
diff --git a/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/RECORD b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/RECORD
new file mode 100644
index 0000000..eb840c1
--- /dev/null
+++ b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/RECORD
@@ -0,0 +1,25 @@
+../../Scripts/google-oauthlib-tool.exe,sha256=MQnxwKFBrTklRk0YJG9QgRwBeyjgj95yqjI2J21hYlU,108439
+docs/__pycache__/conf.cpython-312.pyc,,
+docs/conf.py,sha256=yxdRFyXsTmlqMkXGWei6J67-wbndjU0-TpcQQjHTHQg,12421
+google_auth_oauthlib-1.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+google_auth_oauthlib-1.2.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+google_auth_oauthlib-1.2.0.dist-info/METADATA,sha256=kbw74oxR4lDtiUGq43tf894VBichxoT9gYb3QyBTSKU,2696
+google_auth_oauthlib-1.2.0.dist-info/RECORD,,
+google_auth_oauthlib-1.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google_auth_oauthlib-1.2.0.dist-info/WHEEL,sha256=P2T-6epvtXQ2cBOE_U1K4_noqlJFN3tj15djMgEu4NM,110
+google_auth_oauthlib-1.2.0.dist-info/entry_points.txt,sha256=DL3GRTp3HgwLxdBZCPkz9o8sK9cnENZMmu09EzAovkk,88
+google_auth_oauthlib-1.2.0.dist-info/top_level.txt,sha256=1UIrxRzACA8j-HW3CjjUghyqzTMl5NOXgMwHPpTY-BU,42
+google_auth_oauthlib/__init__.py,sha256=TTUgKl-F0eTBxBmbwDwDbIN-L33LmD4JbNKwXE-HngU,846
+google_auth_oauthlib/__pycache__/__init__.cpython-312.pyc,,
+google_auth_oauthlib/__pycache__/flow.cpython-312.pyc,,
+google_auth_oauthlib/__pycache__/helpers.cpython-312.pyc,,
+google_auth_oauthlib/__pycache__/interactive.cpython-312.pyc,,
+google_auth_oauthlib/flow.py,sha256=dINL-6b0UjC9Vj8icj9JnEtFDs8ym1QQhvs6IV7TDA8,19608
+google_auth_oauthlib/helpers.py,sha256=C6oIR1TZsizfH61SxdhaLI7utttHgI4Ww0tMrLhYnAY,5729
+google_auth_oauthlib/interactive.py,sha256=BW-L8FToU4iXf8Fd85U6kWJvjWSyS5FVvYvAIDe4NVM,6063
+google_auth_oauthlib/tool/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google_auth_oauthlib/tool/__main__.py,sha256=BeX0cYaM50IInJ1c5U3ukxtuI9q1no-KIJhW7ynJBfs,3835
+google_auth_oauthlib/tool/__pycache__/__init__.cpython-312.pyc,,
+google_auth_oauthlib/tool/__pycache__/__main__.cpython-312.pyc,,
+scripts/readme-gen/__pycache__/readme_gen.cpython-312.pyc,,
+scripts/readme-gen/readme_gen.py,sha256=hPFWJnVXqwe6LOGJoWePgI7E97QBBQQTx6WvHW5ucZ0,1750
diff --git a/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/REQUESTED b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/WHEEL b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/WHEEL
new file mode 100644
index 0000000..f31e450
--- /dev/null
+++ b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.3)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/entry_points.txt b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/entry_points.txt
new file mode 100644
index 0000000..a33f32f
--- /dev/null
+++ b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[console_scripts]
+google-oauthlib-tool = google_auth_oauthlib.tool.__main__:main [tool]
diff --git a/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/top_level.txt b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/top_level.txt
new file mode 100644
index 0000000..14627fc
--- /dev/null
+++ b/Lib/site-packages/google_auth_oauthlib-1.2.0.dist-info/top_level.txt
@@ -0,0 +1,4 @@
+docs
+google_auth_oauthlib
+scripts
+testing
diff --git a/Lib/site-packages/google_auth_oauthlib/__init__.py b/Lib/site-packages/google_auth_oauthlib/__init__.py
new file mode 100644
index 0000000..1905f9a
--- /dev/null
+++ b/Lib/site-packages/google_auth_oauthlib/__init__.py
@@ -0,0 +1,23 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""oauthlib integration for Google Auth
+
+This library provides `oauthlib <https://github.com/idan/oauthlib>`__
+integration with `google-auth <https://github.com/googleapis/google-auth-library-python>`__.
+"""
+
+from .interactive import get_user_credentials
+
+__all__ = ["get_user_credentials"]
diff --git a/Lib/site-packages/google_auth_oauthlib/flow.py b/Lib/site-packages/google_auth_oauthlib/flow.py
new file mode 100644
index 0000000..c5d8bce
--- /dev/null
+++ b/Lib/site-packages/google_auth_oauthlib/flow.py
@@ -0,0 +1,505 @@
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""OAuth 2.0 Authorization Flow
+
+This module provides integration with `requests-oauthlib`_ for running the
+`OAuth 2.0 Authorization Flow`_ and acquiring user credentials. See
+`Using OAuth 2.0 to Access Google APIs`_ for an overview of OAuth 2.0
+authorization scenarios Google APIs support.
+
+Here's an example of using :class:`InstalledAppFlow`::
+
+ from google_auth_oauthlib.flow import InstalledAppFlow
+
+ # Create the flow using the client secrets file from the Google API
+ # Console.
+ flow = InstalledAppFlow.from_client_secrets_file(
+ 'client_secrets.json',
+ scopes=['profile', 'email'])
+
+ flow.run_local_server()
+
+ # You can use flow.credentials, or you can just get a requests session
+ # using flow.authorized_session.
+ session = flow.authorized_session()
+
+ profile_info = session.get(
+ 'https://www.googleapis.com/userinfo/v2/me').json()
+
+ print(profile_info)
+ # {'name': '...', 'email': '...', ...}
+
+.. _requests-oauthlib: http://requests-oauthlib.readthedocs.io/en/latest/
+.. _OAuth 2.0 Authorization Flow:
+ https://tools.ietf.org/html/rfc6749#section-1.2
+.. _Using OAuth 2.0 to Access Google APIs:
+ https://developers.google.com/identity/protocols/oauth2
+
+"""
+from base64 import urlsafe_b64encode
+import hashlib
+import json
+import logging
+
+try:
+ from secrets import SystemRandom
+except ImportError: # pragma: NO COVER
+ from random import SystemRandom
+from string import ascii_letters, digits
+import webbrowser
+import wsgiref.simple_server
+import wsgiref.util
+
+import google.auth.transport.requests
+import google.oauth2.credentials
+
+import google_auth_oauthlib.helpers
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class Flow(object):
+ """OAuth 2.0 Authorization Flow
+
+ This class uses a :class:`requests_oauthlib.OAuth2Session` instance at
+ :attr:`oauth2session` to perform all of the OAuth 2.0 logic. This class
+ just provides convenience methods and sane defaults for doing Google's
+ particular flavors of OAuth 2.0.
+
+ Typically you'll construct an instance of this flow using
+ :meth:`from_client_secrets_file` and a `client secrets file`_ obtained
+ from the `Google API Console`_.
+
+ .. _client secrets file:
+ https://developers.google.com/identity/protocols/oauth2/web-server
+ #creatingcred
+ .. _Google API Console:
+ https://console.developers.google.com/apis/credentials
+ """
+
+ def __init__(
+ self,
+ oauth2session,
+ client_type,
+ client_config,
+ redirect_uri=None,
+ code_verifier=None,
+ autogenerate_code_verifier=True,
+ ):
+ """
+ Args:
+ oauth2session (requests_oauthlib.OAuth2Session):
+ The OAuth 2.0 session from ``requests-oauthlib``.
+ client_type (str): The client type, either ``web`` or
+ ``installed``.
+ client_config (Mapping[str, Any]): The client
+ configuration in the Google `client secrets`_ format.
+ redirect_uri (str): The OAuth 2.0 redirect URI if known at flow
+ creation time. Otherwise, it will need to be set using
+ :attr:`redirect_uri`.
+            code_verifier (str): A random string of 43-128 chars used to
+                verify the key exchange, using PKCE.
+ autogenerate_code_verifier (bool): If true, auto-generate a
+ code_verifier.
+ .. _client secrets:
+ https://github.com/googleapis/google-api-python-client/blob
+ /main/docs/client-secrets.md
+ """
+ self.client_type = client_type
+ """str: The client type, either ``'web'`` or ``'installed'``"""
+ self.client_config = client_config[client_type]
+ """Mapping[str, Any]: The OAuth 2.0 client configuration."""
+ self.oauth2session = oauth2session
+ """requests_oauthlib.OAuth2Session: The OAuth 2.0 session."""
+ self.redirect_uri = redirect_uri
+ self.code_verifier = code_verifier
+ self.autogenerate_code_verifier = autogenerate_code_verifier
+
+ @classmethod
+ def from_client_config(cls, client_config, scopes, **kwargs):
+ """Creates a :class:`requests_oauthlib.OAuth2Session` from client
+ configuration loaded from a Google-format client secrets file.
+
+ Args:
+ client_config (Mapping[str, Any]): The client
+ configuration in the Google `client secrets`_ format.
+ scopes (Sequence[str]): The list of scopes to request during the
+ flow.
+ kwargs: Any additional parameters passed to
+ :class:`requests_oauthlib.OAuth2Session`
+
+ Returns:
+ Flow: The constructed Flow instance.
+
+ Raises:
+ ValueError: If the client configuration is not in the correct
+ format.
+
+ .. _client secrets:
+ https://github.com/googleapis/google-api-python-client/blob/main/docs/client-secrets.md
+ """
+ if "web" in client_config:
+ client_type = "web"
+ elif "installed" in client_config:
+ client_type = "installed"
+ else:
+ raise ValueError("Client secrets must be for a web or installed app.")
+
+ # these args cannot be passed to requests_oauthlib.OAuth2Session
+ code_verifier = kwargs.pop("code_verifier", None)
+ autogenerate_code_verifier = kwargs.pop("autogenerate_code_verifier", None)
+
+ (
+ session,
+ client_config,
+ ) = google_auth_oauthlib.helpers.session_from_client_config(
+ client_config, scopes, **kwargs
+ )
+
+ redirect_uri = kwargs.get("redirect_uri", None)
+
+ return cls(
+ session,
+ client_type,
+ client_config,
+ redirect_uri,
+ code_verifier,
+ autogenerate_code_verifier,
+ )
+
+ @classmethod
+ def from_client_secrets_file(cls, client_secrets_file, scopes, **kwargs):
+ """Creates a :class:`Flow` instance from a Google client secrets file.
+
+ Args:
+ client_secrets_file (str): The path to the client secrets .json
+ file.
+ scopes (Sequence[str]): The list of scopes to request during the
+ flow.
+ kwargs: Any additional parameters passed to
+ :class:`requests_oauthlib.OAuth2Session`
+
+ Returns:
+ Flow: The constructed Flow instance.
+ """
+ with open(client_secrets_file, "r") as json_file:
+ client_config = json.load(json_file)
+
+ return cls.from_client_config(client_config, scopes=scopes, **kwargs)
+
+ @property
+ def redirect_uri(self):
+ """The OAuth 2.0 redirect URI. Pass-through to
+ ``self.oauth2session.redirect_uri``."""
+ return self.oauth2session.redirect_uri
+
+ @redirect_uri.setter
+ def redirect_uri(self, value):
+ """The OAuth 2.0 redirect URI. Pass-through to
+ ``self.oauth2session.redirect_uri``."""
+ self.oauth2session.redirect_uri = value
+
+ def authorization_url(self, **kwargs):
+ """Generates an authorization URL.
+
+ This is the first step in the OAuth 2.0 Authorization Flow. The user's
+ browser should be redirected to the returned URL.
+
+ This method calls
+ :meth:`requests_oauthlib.OAuth2Session.authorization_url`
+        and specifies the client configuration's authorization URI (usually
+        Google's authorization server), requesting "offline" access. This is
+        required in order to obtain a refresh token.
+
+ Args:
+ kwargs: Additional arguments passed through to
+ :meth:`requests_oauthlib.OAuth2Session.authorization_url`
+
+ Returns:
+ Tuple[str, str]: The generated authorization URL and state. The
+ user must visit the URL to complete the flow. The state is used
+ when completing the flow to verify that the request originated
+ from your application. If your application is using a different
+ :class:`Flow` instance to obtain the token, you will need to
+ specify the ``state`` when constructing the :class:`Flow`.
+ """
+ kwargs.setdefault("access_type", "offline")
+ if self.autogenerate_code_verifier:
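+            # Build a 128-char verifier from the RFC 7636 unreserved
+            # character set (ALPHA / DIGIT / "-" / "." / "_" / "~").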
+ chars = ascii_letters + digits + "-._~"
+ rnd = SystemRandom()
+ random_verifier = [rnd.choice(chars) for _ in range(0, 128)]
+ self.code_verifier = "".join(random_verifier)
+
+ if self.code_verifier:
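+            # RFC 7636 "S256" method: code_challenge is the URL-safe
+            # base64 of SHA-256(verifier), with '=' padding stripped
+            # (the split("=")[0] below drops that padding).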
+ code_hash = hashlib.sha256()
+ code_hash.update(str.encode(self.code_verifier))
+ unencoded_challenge = code_hash.digest()
+ b64_challenge = urlsafe_b64encode(unencoded_challenge)
+ code_challenge = b64_challenge.decode().split("=")[0]
+ kwargs.setdefault("code_challenge", code_challenge)
+ kwargs.setdefault("code_challenge_method", "S256")
+ url, state = self.oauth2session.authorization_url(
+ self.client_config["auth_uri"], **kwargs
+ )
+
+ return url, state
+
+ def fetch_token(self, **kwargs):
+ """Completes the Authorization Flow and obtains an access token.
+
+ This is the final step in the OAuth 2.0 Authorization Flow. This is
+ called after the user consents.
+
+ This method calls
+ :meth:`requests_oauthlib.OAuth2Session.fetch_token`
+ and specifies the client configuration's token URI (usually Google's
+ token server).
+
+ Args:
+ kwargs: Arguments passed through to
+ :meth:`requests_oauthlib.OAuth2Session.fetch_token`. At least
+ one of ``code`` or ``authorization_response`` must be
+ specified.
+
+ Returns:
+            Mapping[str, str]: The obtained tokens. Typically, you will not
+            use the return value of this function and will instead use
+ :meth:`credentials` to obtain a
+ :class:`~google.auth.credentials.Credentials` instance.
+ """
+ kwargs.setdefault("client_secret", self.client_config["client_secret"])
+ kwargs.setdefault("code_verifier", self.code_verifier)
+ return self.oauth2session.fetch_token(self.client_config["token_uri"], **kwargs)
+
+ @property
+ def credentials(self):
+ """Returns credentials from the OAuth 2.0 session.
+
+ :meth:`fetch_token` must be called before accessing this. This method
+ constructs a :class:`google.oauth2.credentials.Credentials` class using
+ the session's token and the client config.
+
+ Returns:
+ google.oauth2.credentials.Credentials: The constructed credentials.
+
+ Raises:
+ ValueError: If there is no access token in the session.
+ """
+ return google_auth_oauthlib.helpers.credentials_from_session(
+ self.oauth2session, self.client_config
+ )
+
+ def authorized_session(self):
+ """Returns a :class:`requests.Session` authorized with credentials.
+
+ :meth:`fetch_token` must be called before this method. This method
+ constructs a :class:`google.auth.transport.requests.AuthorizedSession`
+ class using this flow's :attr:`credentials`.
+
+ Returns:
+ google.auth.transport.requests.AuthorizedSession: The constructed
+ session.
+ """
+ return google.auth.transport.requests.AuthorizedSession(self.credentials)
+
+
+class InstalledAppFlow(Flow):
+ """Authorization flow helper for installed applications.
+
+ This :class:`Flow` subclass makes it easier to perform the
+ `Installed Application Authorization Flow`_. This flow is useful for
+ local development or applications that are installed on a desktop operating
+ system.
+
+ This flow uses a local server strategy provided by :meth:`run_local_server`.
+
+ Example::
+
+ from google_auth_oauthlib.flow import InstalledAppFlow
+
+ flow = InstalledAppFlow.from_client_secrets_file(
+ 'client_secrets.json',
+ scopes=['profile', 'email'])
+
+ flow.run_local_server()
+
+ session = flow.authorized_session()
+
+ profile_info = session.get(
+ 'https://www.googleapis.com/userinfo/v2/me').json()
+
+ print(profile_info)
+ # {'name': '...', 'email': '...', ...}
+
+
+ Note that this isn't the only way to accomplish the installed
+ application flow, just one of the most common. You can use the
+ :class:`Flow` class to perform the same flow with different methods of
+ presenting the authorization URL to the user or obtaining the authorization
+ response, such as using an embedded web view.
+
+ .. _Installed Application Authorization Flow:
+ https://github.com/googleapis/google-api-python-client/blob/main/docs/oauth-installed.md
+ """
+
+ _DEFAULT_AUTH_PROMPT_MESSAGE = (
+ "Please visit this URL to authorize this application: {url}"
+ )
+ """str: The message to display when prompting the user for
+ authorization."""
+ _DEFAULT_AUTH_CODE_MESSAGE = "Enter the authorization code: "
+ """str: The message to display when prompting the user for the
+ authorization code. Used only by the console strategy."""
+
+ _DEFAULT_WEB_SUCCESS_MESSAGE = (
+ "The authentication flow has completed. You may close this window."
+ )
+
+ def run_local_server(
+ self,
+ host="localhost",
+ bind_addr=None,
+ port=8080,
+ authorization_prompt_message=_DEFAULT_AUTH_PROMPT_MESSAGE,
+ success_message=_DEFAULT_WEB_SUCCESS_MESSAGE,
+ open_browser=True,
+ redirect_uri_trailing_slash=True,
+ timeout_seconds=None,
+ token_audience=None,
+ browser=None,
+ **kwargs
+ ):
+ """Run the flow using the server strategy.
+
+ The server strategy instructs the user to open the authorization URL in
+ their browser and will attempt to automatically open the URL for them.
+ It will start a local web server to listen for the authorization
+ response. Once authorization is complete the authorization server will
+ redirect the user's browser to the local web server. The web server
+        will get the authorization code from the response and shut down. The
+ code is then exchanged for a token.
+
+ Args:
+ host (str): The hostname for the local redirect server. This will
+ be served over http, not https.
+            bind_addr (str): Optionally provide an IP address for the redirect
+                server to listen on when it is not the same as host
+                (e.g. in a container). Default value is None,
+                which means that the redirect server will listen
+                on the IP address specified in the host parameter.
+ port (int): The port for the local redirect server.
+ authorization_prompt_message (str | None): The message to display to tell
+ the user to navigate to the authorization URL. If None or empty,
+ don't display anything.
+ success_message (str): The message to display in the web browser
+                when the authorization flow is complete.
+ open_browser (bool): Whether or not to open the authorization URL
+ in the user's browser.
+ redirect_uri_trailing_slash (bool): whether or not to add trailing
+ slash when constructing the redirect_uri. Default value is True.
+            timeout_seconds (int): Raise an error if no authorization response
+                arrives within this many seconds of starting the local server.
+                When set to None there is no timeout.
+                Default value is None.
+ token_audience (str): Passed along with the request for an access
+ token. Determines the endpoints with which the token can be
+ used. Optional.
+ browser (str): specify which browser to open for authentication. If not
+                specified this defaults to the system default browser.
+ kwargs: Additional keyword arguments passed through to
+ :meth:`authorization_url`.
+
+ Returns:
+ google.oauth2.credentials.Credentials: The OAuth 2.0 credentials
+ for the user.
+ """
+ wsgi_app = _RedirectWSGIApp(success_message)
+ # Fail fast if the address is occupied
+ wsgiref.simple_server.WSGIServer.allow_reuse_address = False
+ local_server = wsgiref.simple_server.make_server(
+ bind_addr or host, port, wsgi_app, handler_class=_WSGIRequestHandler
+ )
+
+ redirect_uri_format = (
+ "http://{}:{}/" if redirect_uri_trailing_slash else "http://{}:{}"
+ )
+ self.redirect_uri = redirect_uri_format.format(host, local_server.server_port)
+ auth_url, _ = self.authorization_url(**kwargs)
+
+ if open_browser:
+ # if browser is None it defaults to default browser
+ webbrowser.get(browser).open(auth_url, new=1, autoraise=True)
+
+ if authorization_prompt_message:
+ print(authorization_prompt_message.format(url=auth_url))
+
+ local_server.timeout = timeout_seconds
+ local_server.handle_request()
+
+ # Note: using https here because oauthlib is very picky that
+ # OAuth 2.0 should only occur over https.
+ authorization_response = wsgi_app.last_request_uri.replace("http", "https")
+ self.fetch_token(
+ authorization_response=authorization_response, audience=token_audience
+ )
+
+ # This closes the socket
+ local_server.server_close()
+
+ return self.credentials
+
+
+class _WSGIRequestHandler(wsgiref.simple_server.WSGIRequestHandler):
+ """Custom WSGIRequestHandler.
+
+ Uses a named logger instead of printing to stderr.
+ """
+
+ def log_message(self, format, *args):
+ # pylint: disable=redefined-builtin
+ # (format is the argument name defined in the superclass.)
+ _LOGGER.info(format, *args)
+
+
+class _RedirectWSGIApp(object):
+ """WSGI app to handle the authorization redirect.
+
+ Stores the request URI and displays the given success message.
+ """
+
+ def __init__(self, success_message):
+ """
+ Args:
+ success_message (str): The message to display in the web browser
+                when the authorization flow is complete.
+ """
+ self.last_request_uri = None
+ self._success_message = success_message
+
+ def __call__(self, environ, start_response):
+ """WSGI Callable.
+
+ Args:
+ environ (Mapping[str, Any]): The WSGI environment.
+ start_response (Callable[str, list]): The WSGI start_response
+ callable.
+
+ Returns:
+ Iterable[bytes]: The response body.
+ """
+ start_response("200 OK", [("Content-type", "text/plain; charset=utf-8")])
+ self.last_request_uri = wsgiref.util.request_uri(environ)
+ return [self._success_message.encode("utf-8")]
diff --git a/Lib/site-packages/google_auth_oauthlib/helpers.py b/Lib/site-packages/google_auth_oauthlib/helpers.py
new file mode 100644
index 0000000..25462f4
--- /dev/null
+++ b/Lib/site-packages/google_auth_oauthlib/helpers.py
@@ -0,0 +1,151 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Integration helpers.
+
+This module provides helpers for integrating with `requests-oauthlib`_.
+Typically, you'll want to use the higher-level helpers in
+:mod:`google_auth_oauthlib.flow`.
+
+.. _requests-oauthlib: http://requests-oauthlib.readthedocs.io/en/latest/
+"""
+
+import datetime
+import json
+
+from google.auth import external_account_authorized_user
+import google.oauth2.credentials
+import requests_oauthlib
+
+_REQUIRED_CONFIG_KEYS = frozenset(("auth_uri", "token_uri", "client_id"))
+
+
+def session_from_client_config(client_config, scopes, **kwargs):
+ """Creates a :class:`requests_oauthlib.OAuth2Session` from client
+ configuration loaded from a Google-format client secrets file.
+
+ Args:
+ client_config (Mapping[str, Any]): The client
+ configuration in the Google `client secrets`_ format.
+ scopes (Sequence[str]): The list of scopes to request during the
+ flow.
+ kwargs: Any additional parameters passed to
+ :class:`requests_oauthlib.OAuth2Session`
+
+ Raises:
+ ValueError: If the client configuration is not in the correct
+ format.
+
+ Returns:
+ Tuple[requests_oauthlib.OAuth2Session, Mapping[str, Any]]: The new
+ oauthlib session and the validated client configuration.
+
+ .. _client secrets:
+ https://github.com/googleapis/google-api-python-client/blob/main/docs/client-secrets.md
+ """
+
+ if "web" in client_config:
+ config = client_config["web"]
+ elif "installed" in client_config:
+ config = client_config["installed"]
+ else:
+ raise ValueError("Client secrets must be for a web or installed app.")
+
+ if not _REQUIRED_CONFIG_KEYS.issubset(config.keys()):
+ raise ValueError("Client secrets is not in the correct format.")
+
+ session = requests_oauthlib.OAuth2Session(
+ client_id=config["client_id"], scope=scopes, **kwargs
+ )
+
+ return session, client_config
+
+
+def session_from_client_secrets_file(client_secrets_file, scopes, **kwargs):
+ """Creates a :class:`requests_oauthlib.OAuth2Session` instance from a
+ Google-format client secrets file.
+
+ Args:
+ client_secrets_file (str): The path to the `client secrets`_ .json
+ file.
+ scopes (Sequence[str]): The list of scopes to request during the
+ flow.
+ kwargs: Any additional parameters passed to
+ :class:`requests_oauthlib.OAuth2Session`
+
+ Returns:
+ Tuple[requests_oauthlib.OAuth2Session, Mapping[str, Any]]: The new
+ oauthlib session and the validated client configuration.
+
+ .. _client secrets:
+ https://github.com/googleapis/google-api-python-client/blob/main/docs/client-secrets.md
+ """
+ with open(client_secrets_file, "r") as json_file:
+ client_config = json.load(json_file)
+
+ return session_from_client_config(client_config, scopes, **kwargs)
+
+
+def credentials_from_session(session, client_config=None):
+ """Creates :class:`google.oauth2.credentials.Credentials` from a
+ :class:`requests_oauthlib.OAuth2Session`.
+
+    :meth:`fetch_token` must be called on the session before calling
+ this. This uses the session's auth token and the provided client
+ configuration to create :class:`google.oauth2.credentials.Credentials`.
+ This allows you to use the credentials from the session with Google
+ API client libraries.
+
+ Args:
+ session (requests_oauthlib.OAuth2Session): The OAuth 2.0 session.
+ client_config (Mapping[str, Any]): The subset of the client
+ configuration to use. For example, if you have a web client
+ you would pass in `client_config['web']`.
+
+ Returns:
+ google.oauth2.credentials.Credentials: The constructed credentials.
+
+ Raises:
+ ValueError: If there is no access token in the session.
+ """
+ client_config = client_config if client_config is not None else {}
+
+ if not session.token:
+ raise ValueError(
+ "There is no access token for this session, did you call " "fetch_token?"
+ )
+
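+    # "3pi" (presumably third-party identity) marks an external-account
+    # client config: build external_account_authorized_user credentials
+    # rather than standard google.oauth2 user credentials.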
+ if "3pi" in client_config:
+ credentials = external_account_authorized_user.Credentials(
+ token=session.token["access_token"],
+ refresh_token=session.token.get("refresh_token"),
+ token_url=client_config.get("token_uri"),
+ client_id=client_config.get("client_id"),
+ client_secret=client_config.get("client_secret"),
+ token_info_url=client_config.get("token_info_url"),
+ scopes=session.scope,
+ )
+ else:
+ credentials = google.oauth2.credentials.Credentials(
+ session.token["access_token"],
+ refresh_token=session.token.get("refresh_token"),
+ id_token=session.token.get("id_token"),
+ token_uri=client_config.get("token_uri"),
+ client_id=client_config.get("client_id"),
+ client_secret=client_config.get("client_secret"),
+ scopes=session.scope,
+ granted_scopes=session.token.get("scope"),
+ )
+ credentials.expiry = datetime.datetime.utcfromtimestamp(session.token["expires_at"])
+ return credentials
diff --git a/Lib/site-packages/google_auth_oauthlib/interactive.py b/Lib/site-packages/google_auth_oauthlib/interactive.py
new file mode 100644
index 0000000..b1ed990
--- /dev/null
+++ b/Lib/site-packages/google_auth_oauthlib/interactive.py
@@ -0,0 +1,172 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Get user credentials from interactive code environments.
+
+This module contains helpers for getting user credentials from interactive
+code environments installed on a development machine, such as Jupyter
+notebooks.
+"""
+
+from __future__ import absolute_import
+
+import contextlib
+import socket
+
+import google_auth_oauthlib.flow
+
+
+LOCALHOST = "localhost"
+DEFAULT_PORTS_TO_TRY = 100
+
+
+def is_port_open(port):
+ """Check if a port is open on localhost.
+ Based on StackOverflow answer: https://stackoverflow.com/a/43238489/101923
+ Parameters
+ ----------
+ port : int
+ A port to check on localhost.
+ Returns
+ -------
+ is_open : bool
+ True if a socket can be opened at the requested port.
+ """
+ with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
+ try:
+ sock.bind((LOCALHOST, port))
+ sock.listen(1)
+ except socket.error:
+ is_open = False
+ else:
+ is_open = True
+ return is_open
+
+
+def find_open_port(start=8080, stop=None):
+ """Find an open port between ``start`` and ``stop``.
+ Parameters
+ ----------
+ start : Optional[int]
+ Beginning of range of ports to try. Defaults to 8080.
+ stop : Optional[int]
+        End of range of ports to try (exclusive; ``stop`` itself is not tried).
+ This function tries 100 possible ports if no ``stop`` is specified.
+ Returns
+ -------
+ Optional[int]
+ ``None`` if no open port is found, otherwise an integer indicating an
+ open port.
+ """
+ if not stop:
+ stop = start + DEFAULT_PORTS_TO_TRY
+
+ for port in range(start, stop):
+ if is_port_open(port):
+ return port
+
+ # No open ports found.
+ return None
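+
+# Editorial sketch (not part of the upstream module): minimal use of the two
+# helpers above, assuming at least one port in the range is free.
+def _example_find_port():
+    port = find_open_port(start=8080, stop=8090)
+    if port is None:
+        raise ConnectionError("No open port between 8080 and 8089.")
+    # The port found is still open until something binds it.
+    assert is_port_open(port)
+    return port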
+
+
+def get_user_credentials(
+ scopes, client_id, client_secret, minimum_port=8080, maximum_port=None
+):
+ """Gets credentials associated with your Google user account.
+
+ This function authenticates using your user credentials by going through
+ the OAuth 2.0 flow. You'll open a browser window to authenticate to your
+ Google account. The permissions it requests correspond to the scopes
+ you've provided.
+
+ To obtain the ``client_id`` and ``client_secret``, create an **OAuth
+ client ID** with application type **Other** from the `Credentials page on
+ the Google Developer's Console
+ <https://console.developers.google.com/apis/credentials>`_. Learn more
+ with the `Authenticating as an end user
+ <https://cloud.google.com/docs/authentication/end-user>`_ guide.
+
+ Args:
+ scopes (Sequence[str]):
+ A list of scopes to use when authenticating to Google APIs. See
+ the `list of OAuth 2.0 scopes for Google APIs
+ <https://developers.google.com/identity/protocols/googlescopes>`_.
+ client_id (str):
+ A string that identifies your application to Google APIs. Find
+ this value in the `Credentials page on the Google Developer's
+ Console
+ <https://console.developers.google.com/apis/credentials>`_.
+ client_secret (str):
+ A string that verifies your application to Google APIs. Find this
+ value in the `Credentials page on the Google Developer's Console
+ <https://console.developers.google.com/apis/credentials>`_.
+ minimum_port (int):
+ Beginning of range of ports to try for redirect URI HTTP server.
+ Defaults to 8080.
+ maximum_port (Optional[int]):
+ End of range of ports to try (``maximum_port`` itself is excluded).
+ This function tries 100 possible ports if no ``maximum_port`` is specified.
+
+ Returns:
+ google.oauth2.credentials.Credentials:
+ The OAuth 2.0 credentials for the user.
+
+ Examples:
+ Get credentials for your user account and use them to run a query
+ with BigQuery::
+
+ import google_auth_oauthlib
+
+ # TODO: Create a client ID for your project.
+ client_id = "YOUR-CLIENT-ID.apps.googleusercontent.com"
+ client_secret = "abc_ThIsIsAsEcReT"
+
+ # TODO: Choose the needed scopes for your applications.
+ scopes = ["https://www.googleapis.com/auth/cloud-platform"]
+
+ credentials = google_auth_oauthlib.get_user_credentials(
+ scopes, client_id, client_secret
+ )
+
+ # 1. Open the link.
+ # 2. Authorize the application to have access to your account.
+ # 3. Copy and paste the authorization code to the prompt.
+
+ # Use the credentials to construct a client for Google APIs.
+ from google.cloud import bigquery
+
+ bigquery_client = bigquery.Client(
+ credentials=credentials, project="your-project-id"
+ )
+ print(list(bigquery_client.query("SELECT 1").result()))
+ """
+
+ client_config = {
+ "installed": {
+ "client_id": client_id,
+ "client_secret": client_secret,
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+ "token_uri": "https://oauth2.googleapis.com/token",
+ }
+ }
+
+ app_flow = google_auth_oauthlib.flow.InstalledAppFlow.from_client_config(
+ client_config, scopes=scopes
+ )
+
+ port = find_open_port(start=minimum_port, stop=maximum_port)
+ if not port:
+ raise ConnectionError("Could not find open port.")
+
+ return app_flow.run_local_server(host=LOCALHOST, port=port)
diff --git a/Lib/site-packages/google_auth_oauthlib/tool/__init__.py b/Lib/site-packages/google_auth_oauthlib/tool/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/google_auth_oauthlib/tool/__main__.py b/Lib/site-packages/google_auth_oauthlib/tool/__main__.py
new file mode 100644
index 0000000..db679a1
--- /dev/null
+++ b/Lib/site-packages/google_auth_oauthlib/tool/__main__.py
@@ -0,0 +1,124 @@
+# Copyright (C) 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Command-line tool for obtaining authorization and credentials from a user.
+
+This tool uses the OAuth 2.0 Authorization Code grant as described in
+`section 1.3.1 of RFC6749`_ and implemented by
+:class:`google_auth_oauthlib.flow.Flow`.
+
+This tool is intended to assist developers in obtaining credentials
+for testing applications where it may not be possible or easy to run a
+complete OAuth 2.0 authorization flow, especially in the case of code
+samples or embedded devices without input / display capabilities.
+
+This is not intended for production use where a combination of
+companion and on-device applications should complete the OAuth 2.0
+authorization flow to get authorization from the users.
+
+.. _section 1.3.1 of RFC6749: https://tools.ietf.org/html/rfc6749#section-1.3.1
+"""
+
+import json
+import os
+import os.path
+
+import click
+
+import google_auth_oauthlib.flow
+
+
+APP_NAME = "google-oauthlib-tool"
+DEFAULT_CREDENTIALS_FILENAME = "credentials.json"
+
+
+@click.command()
+@click.option(
+ "--client-secrets",
+ metavar="",
+ required=True,
+ help="Path to OAuth2 client secret JSON file.",
+)
+@click.option(
+ "--scope",
+ multiple=True,
+ metavar="",
+ required=True,
+ help="API scopes to authorize access for.",
+)
+@click.option(
+ "--save",
+ is_flag=True,
+ metavar="",
+ show_default=True,
+ default=False,
+ help="Save the credentials to file.",
+)
+@click.option(
+ "--credentials",
+ metavar="",
+ show_default=True,
+ default=os.path.join(click.get_app_dir(APP_NAME), DEFAULT_CREDENTIALS_FILENAME),
+ help="Path to store OAuth2 credentials.",
+)
+def main(client_secrets, scope, save, credentials):
+ """Command-line tool for obtaining authorization and credentials from a user.
+
+ This tool uses the OAuth 2.0 Authorization Code grant as described
+ in section 1.3.1 of RFC6749:
+ https://tools.ietf.org/html/rfc6749#section-1.3.1
+
+ This tool is intended to assist developers in obtaining credentials
+ for testing applications or samples.
+
+ This is not intended for production use where a combination of
+ companion and on-device applications should complete the OAuth 2.0
+ authorization flow to get authorization from the users.
+
+ """
+
+ flow = google_auth_oauthlib.flow.InstalledAppFlow.from_client_secrets_file(
+ client_secrets, scopes=scope
+ )
+
+ creds = flow.run_local_server()
+
+ creds_data = {
+ "token": creds.token,
+ "refresh_token": creds.refresh_token,
+ "token_uri": creds.token_uri,
+ "client_id": creds.client_id,
+ "client_secret": creds.client_secret,
+ "scopes": creds.scopes,
+ }
+
+ if save:
+ del creds_data["token"]
+
+ config_path = os.path.dirname(credentials)
+ if config_path and not os.path.isdir(config_path):
+ os.makedirs(config_path)
+
+ with open(credentials, "w") as outfile:
+ json.dump(creds_data, outfile)
+
+ click.echo("credentials saved: %s" % credentials)
+
+ else:
+ click.echo(json.dumps(creds_data))
+
+
+if __name__ == "__main__":
+ # pylint doesn't realize that click has changed the function signature.
+ main() # pylint: disable=no-value-for-parameter
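+
+# Editorial sketch (not part of the upstream module): a hypothetical
+# invocation of the tool above; the secrets path and scope are placeholders.
+#
+#   python -m google_auth_oauthlib.tool \
+#       --client-secrets client_secret.json \
+#       --scope https://www.googleapis.com/auth/userinfo.email \
+#       --save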
diff --git a/Lib/site-packages/googleapiclient/__init__.py b/Lib/site-packages/googleapiclient/__init__.py
new file mode 100644
index 0000000..c9218dd
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+
+try: # Python 2.7+
+ from logging import NullHandler
+except ImportError:
+
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+
+logging.getLogger(__name__).addHandler(NullHandler())
diff --git a/Lib/site-packages/googleapiclient/_auth.py b/Lib/site-packages/googleapiclient/_auth.py
new file mode 100644
index 0000000..065b2ec
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/_auth.py
@@ -0,0 +1,167 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for authentication using oauth2client or google-auth."""
+
+import httplib2
+
+try:
+ import google.auth
+ import google.auth.credentials
+
+ HAS_GOOGLE_AUTH = True
+except ImportError: # pragma: NO COVER
+ HAS_GOOGLE_AUTH = False
+
+try:
+ import google_auth_httplib2
+except ImportError: # pragma: NO COVER
+ google_auth_httplib2 = None
+
+try:
+ import oauth2client
+ import oauth2client.client
+
+ HAS_OAUTH2CLIENT = True
+except ImportError: # pragma: NO COVER
+ HAS_OAUTH2CLIENT = False
+
+
+def credentials_from_file(filename, scopes=None, quota_project_id=None):
+ """Returns credentials loaded from a file."""
+ if HAS_GOOGLE_AUTH:
+ credentials, _ = google.auth.load_credentials_from_file(
+ filename, scopes=scopes, quota_project_id=quota_project_id
+ )
+ return credentials
+ else:
+ raise EnvironmentError(
+ "client_options.credentials_file is only supported in google-auth."
+ )
+
+
+def default_credentials(scopes=None, quota_project_id=None):
+ """Returns Application Default Credentials."""
+ if HAS_GOOGLE_AUTH:
+ credentials, _ = google.auth.default(
+ scopes=scopes, quota_project_id=quota_project_id
+ )
+ return credentials
+ elif HAS_OAUTH2CLIENT:
+ if scopes is not None or quota_project_id is not None:
+ raise EnvironmentError(
+ "client_options.scopes and client_options.quota_project_id are not supported in oauth2client."
+ "Please install google-auth."
+ )
+ return oauth2client.client.GoogleCredentials.get_application_default()
+ else:
+ raise EnvironmentError(
+ "No authentication library is available. Please install either "
+ "google-auth or oauth2client."
+ )
+
+
+def with_scopes(credentials, scopes):
+ """Scopes the credentials if necessary.
+
+ Args:
+ credentials (Union[
+ google.auth.credentials.Credentials,
+ oauth2client.client.Credentials]): The credentials to scope.
+ scopes (Sequence[str]): The list of scopes.
+
+ Returns:
+ Union[google.auth.credentials.Credentials,
+ oauth2client.client.Credentials]: The scoped credentials.
+ """
+ if HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials):
+ return google.auth.credentials.with_scopes_if_required(credentials, scopes)
+ else:
+ try:
+ if credentials.create_scoped_required():
+ return credentials.create_scoped(scopes)
+ else:
+ return credentials
+ except AttributeError:
+ return credentials
+
+
+def authorized_http(credentials):
+ """Returns an http client that is authorized with the given credentials.
+
+ Args:
+ credentials (Union[
+ google.auth.credentials.Credentials,
+ oauth2client.client.Credentials]): The credentials to use.
+
+ Returns:
+ Union[httplib2.Http, google_auth_httplib2.AuthorizedHttp]: An
+ authorized http client.
+ """
+ from googleapiclient.http import build_http
+
+ if HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials):
+ if google_auth_httplib2 is None:
+ raise ValueError(
+ "Credentials from google.auth specified, but "
+ "google-api-python-client is unable to use these credentials "
+ "unless google-auth-httplib2 is installed. Please install "
+ "google-auth-httplib2."
+ )
+ return google_auth_httplib2.AuthorizedHttp(credentials, http=build_http())
+ else:
+ return credentials.authorize(build_http())
+
+
+def refresh_credentials(credentials):
+ # Refresh must use a new http instance, as the one associated with the
+ # credentials could be an AuthorizedHttp or an oauth2client-decorated
+ # Http instance which would cause a weird recursive loop of refreshing
+ # and likely tear a hole in spacetime.
+ refresh_http = httplib2.Http()
+ if HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials):
+ request = google_auth_httplib2.Request(refresh_http)
+ return credentials.refresh(request)
+ else:
+ return credentials.refresh(refresh_http)
+
+
+def apply_credentials(credentials, headers):
+ # oauth2client and google-auth have the same interface for this.
+ if not is_valid(credentials):
+ refresh_credentials(credentials)
+ return credentials.apply(headers)
+
+
+def is_valid(credentials):
+ if HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials):
+ return credentials.valid
+ else:
+ return (
+ credentials.access_token is not None
+ and not credentials.access_token_expired
+ )
+
+
+def get_credentials_from_http(http):
+ if http is None:
+ return None
+ elif hasattr(http.request, "credentials"):
+ return http.request.credentials
+ elif hasattr(http, "credentials") and not isinstance(
+ http.credentials, httplib2.Credentials
+ ):
+ return http.credentials
+ else:
+ return None
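+
+# Editorial sketch (not part of the upstream module): tying the helpers above
+# together, assuming google-auth is installed and Application Default
+# Credentials are configured; the scope is an example.
+def _example_authorized_http():
+    credentials = default_credentials()
+    credentials = with_scopes(
+        credentials, ["https://www.googleapis.com/auth/cloud-platform"]
+    )
+    return authorized_http(credentials)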
diff --git a/Lib/site-packages/googleapiclient/_helpers.py b/Lib/site-packages/googleapiclient/_helpers.py
new file mode 100644
index 0000000..17b8a17
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/_helpers.py
@@ -0,0 +1,207 @@
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper functions for commonly used utilities."""
+
+import functools
+import inspect
+import logging
+import urllib
+
+logger = logging.getLogger(__name__)
+
+POSITIONAL_WARNING = "WARNING"
+POSITIONAL_EXCEPTION = "EXCEPTION"
+POSITIONAL_IGNORE = "IGNORE"
+POSITIONAL_SET = frozenset(
+ [POSITIONAL_WARNING, POSITIONAL_EXCEPTION, POSITIONAL_IGNORE]
+)
+
+positional_parameters_enforcement = POSITIONAL_WARNING
+
+_SYM_LINK_MESSAGE = "File: {0}: Is a symbolic link."
+_IS_DIR_MESSAGE = "{0}: Is a directory"
+_MISSING_FILE_MESSAGE = "Cannot access {0}: No such file or directory"
+
+
+def positional(max_positional_args):
+ """A decorator to declare that only the first N arguments may be positional.
+
+ This decorator makes it easy to support Python 3 style keyword-only
+ parameters. For example, in Python 3 it is possible to write::
+
+ def fn(pos1, *, kwonly1=None, kwonly2=None):
+ ...
+
+ All named parameters after ``*`` must be a keyword::
+
+ fn(10, 'kw1', 'kw2') # Raises exception.
+ fn(10, kwonly1='kw1') # Ok.
+
+ Example
+ ^^^^^^^
+
+ To define a function like above, do::
+
+ @positional(1)
+ def fn(pos1, kwonly1=None, kwonly2=None):
+ ...
+
+ If no default value is provided to a keyword argument, it becomes a
+ required keyword argument::
+
+ @positional(0)
+ def fn(required_kw):
+ ...
+
+ This must be called with the keyword parameter::
+
+ fn() # Raises exception.
+ fn(10) # Raises exception.
+ fn(required_kw=10) # Ok.
+
+ When defining instance or class methods always remember to account for
+ ``self`` and ``cls``::
+
+ class MyClass(object):
+
+ @positional(2)
+ def my_method(self, pos1, kwonly1=None):
+ ...
+
+ @classmethod
+ @positional(2)
+ def my_method(cls, pos1, kwonly1=None):
+ ...
+
+ The positional decorator behavior is controlled by
+ ``_helpers.positional_parameters_enforcement``, which may be set to
+ ``POSITIONAL_EXCEPTION``, ``POSITIONAL_WARNING`` or
+ ``POSITIONAL_IGNORE`` to raise an exception, log a warning, or do
+ nothing, respectively, if a declaration is violated.
+
+ Args:
+ max_positional_args: Maximum number of positional arguments. All
+ parameters after this index must be keyword only.
+
+ Returns:
+ A decorator that prevents using arguments after max_positional_args
+ from being used as positional parameters.
+
+ Raises:
+ TypeError: if a keyword-only argument is provided as a positional
+ parameter, but only if
+ _helpers.positional_parameters_enforcement is set to
+ POSITIONAL_EXCEPTION.
+ """
+
+ def positional_decorator(wrapped):
+ @functools.wraps(wrapped)
+ def positional_wrapper(*args, **kwargs):
+ if len(args) > max_positional_args:
+ plural_s = ""
+ if max_positional_args != 1:
+ plural_s = "s"
+ message = (
+ "{function}() takes at most {args_max} positional "
+ "argument{plural} ({args_given} given)".format(
+ function=wrapped.__name__,
+ args_max=max_positional_args,
+ args_given=len(args),
+ plural=plural_s,
+ )
+ )
+ if positional_parameters_enforcement == POSITIONAL_EXCEPTION:
+ raise TypeError(message)
+ elif positional_parameters_enforcement == POSITIONAL_WARNING:
+ logger.warning(message)
+ return wrapped(*args, **kwargs)
+
+ return positional_wrapper
+
+ if isinstance(max_positional_args, int):
+ return positional_decorator
+ else:
+ args, _, _, defaults, _, _, _ = inspect.getfullargspec(max_positional_args)
+ return positional(len(args) - len(defaults))(max_positional_args)
+
+
+def parse_unique_urlencoded(content):
+ """Parses unique key-value parameters from urlencoded content.
+
+ Args:
+ content: string, URL-encoded key-value pairs.
+
+ Returns:
+ dict, The key-value pairs from ``content``.
+
+ Raises:
+ ValueError: if one of the keys is repeated.
+ """
+ urlencoded_params = urllib.parse.parse_qs(content)
+ params = {}
+ for key, value in urlencoded_params.items():
+ if len(value) != 1:
+ msg = "URL-encoded content contains a repeated value:" "%s -> %s" % (
+ key,
+ ", ".join(value),
+ )
+ raise ValueError(msg)
+ params[key] = value[0]
+ return params
+
+
+def update_query_params(uri, params):
+ """Updates a URI with new query parameters.
+
+ If a given key from ``params`` is repeated in the ``uri``, then
+ the URI will be considered invalid and an error will occur.
+
+ If the URI is valid, then each value from ``params`` will
+ replace the corresponding value in the query parameters (if
+ it exists).
+
+ Args:
+ uri: string, A valid URI, with potential existing query parameters.
+ params: dict, A dictionary of query parameters.
+
+ Returns:
+ The same URI but with the new query parameters added.
+ """
+ parts = urllib.parse.urlparse(uri)
+ query_params = parse_unique_urlencoded(parts.query)
+ query_params.update(params)
+ new_query = urllib.parse.urlencode(query_params)
+ new_parts = parts._replace(query=new_query)
+ return urllib.parse.urlunparse(new_parts)
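+
+# Editorial note (not part of the upstream module): expected behavior of
+# update_query_params, e.g.
+#
+#   update_query_params("http://example.com/path?a=1", {"b": "2"})
+#   -> "http://example.com/path?a=1&b=2"  (parameter order may vary)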
+
+
+def _add_query_parameter(url, name, value):
+ """Adds a query parameter to a url.
+
+ Replaces the current value if it already exists in the URL.
+
+ Args:
+ url: string, url to add the query parameter to.
+ name: string, query parameter name.
+ value: string, query parameter value.
+
+ Returns:
+ The url with the query parameter added. The url is returned unchanged if value is None.
+ """
+ if value is None:
+ return url
+ else:
+ return update_query_params(url, {name: value})
diff --git a/Lib/site-packages/googleapiclient/channel.py b/Lib/site-packages/googleapiclient/channel.py
new file mode 100644
index 0000000..37bda1e
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/channel.py
@@ -0,0 +1,315 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Channel notifications support.
+
+Classes and functions to support channel subscriptions and notifications
+on those channels.
+
+Notes:
+ - This code is based on experimental APIs and is subject to change.
+ - Notification does not do deduplication of notification ids, that's up to
+ the receiver.
+ - Storing the Channel between calls is up to the caller.
+
+
+Example setting up a channel:
+
+ # Create a new channel that gets notifications via webhook.
+ channel = new_webhook_channel("https://example.com/my_web_hook")
+
+ # Store the channel, keyed by 'channel.id'. Store it before calling the
+ # watch method because notifications may start arriving before the watch
+ # method returns.
+ ...
+
+ resp = service.objects().watchAll(
+ bucket="some_bucket_id", body=channel.body()).execute()
+ channel.update(resp)
+
+ # Store the channel, keyed by 'channel.id'. Store it after being updated
+ # since the resource_id value will now be correct, and that's needed to
+ # stop a subscription.
+ ...
+
+
+An example Webhook implementation using webapp2. Note that webapp2 puts
+headers in a case-insensitive dictionary, as headers aren't guaranteed to
+always be upper case.
+
+ id = self.request.headers[X_GOOG_CHANNEL_ID]
+
+ # Retrieve the channel by id.
+ channel = ...
+
+ # Parse notification from the headers, including validating the id.
+ n = notification_from_headers(channel, self.request.headers)
+
+ # Do app specific stuff with the notification here.
+ if n.resource_state == 'sync':
+ # Code to handle sync state.
+ elif n.resource_state == 'exists':
+ # Code to handle the exists state.
+ elif n.resource_state == 'not_exists':
+ # Code to handle the not exists state.
+
+
+Example of unsubscribing.
+
+ service.channels().stop(channel.body()).execute()
+"""
+from __future__ import absolute_import
+
+import datetime
+import uuid
+
+from googleapiclient import _helpers as util
+from googleapiclient import errors
+
+# The unix time epoch starts at midnight 1970.
+EPOCH = datetime.datetime(1970, 1, 1)
+
+# Map the names of the parameters in the JSON channel description to
+# the parameter names we use in the Channel class.
+CHANNEL_PARAMS = {
+ "address": "address",
+ "id": "id",
+ "expiration": "expiration",
+ "params": "params",
+ "resourceId": "resource_id",
+ "resourceUri": "resource_uri",
+ "type": "type",
+ "token": "token",
+}
+
+X_GOOG_CHANNEL_ID = "X-GOOG-CHANNEL-ID"
+X_GOOG_MESSAGE_NUMBER = "X-GOOG-MESSAGE-NUMBER"
+X_GOOG_RESOURCE_STATE = "X-GOOG-RESOURCE-STATE"
+X_GOOG_RESOURCE_URI = "X-GOOG-RESOURCE-URI"
+X_GOOG_RESOURCE_ID = "X-GOOG-RESOURCE-ID"
+
+
+def _upper_header_keys(headers):
+ new_headers = {}
+ for k, v in headers.items():
+ new_headers[k.upper()] = v
+ return new_headers
+
+
+class Notification(object):
+ """A Notification from a Channel.
+
+ Notifications are not usually constructed directly, but are returned
+ from functions like notification_from_headers().
+
+ Attributes:
+ message_number: int, The unique id number of this notification.
+ state: str, The state of the resource being monitored.
+ resource_uri: str, The address of the resource being monitored.
+ resource_id: str, The unique identifier of the version of the resource at
+ this event.
+ """
+
+ @util.positional(5)
+ def __init__(self, message_number, state, resource_uri, resource_id):
+ """Notification constructor.
+
+ Args:
+ message_number: int, The unique id number of this notification.
+ state: str, The state of the resource being monitored. Can be one
+ of "exists", "not_exists", or "sync".
+ resource_uri: str, The address of the resource being monitored.
+ resource_id: str, The identifier of the watched resource.
+ """
+ self.message_number = message_number
+ self.state = state
+ self.resource_uri = resource_uri
+ self.resource_id = resource_id
+
+
+class Channel(object):
+ """A Channel for notifications.
+
+ Usually not constructed directly; instead it is returned from helper
+ functions like new_webhook_channel().
+
+ Attributes:
+ type: str, The type of delivery mechanism used by this channel. For
+ example, 'web_hook'.
+ id: str, A UUID for the channel.
+ token: str, An arbitrary string associated with the channel that
+ is delivered to the target address with each event delivered
+ over this channel.
+ address: str, The address of the receiving entity where events are
+ delivered. Specific to the channel type.
+ expiration: int, The time, in milliseconds from the epoch, when this
+ channel will expire.
+ params: dict, A dictionary of string to string, with additional parameters
+ controlling delivery channel behavior.
+ resource_id: str, An opaque id that identifies the resource that is
+ being watched. Stable across different API versions.
+ resource_uri: str, The canonicalized ID of the watched resource.
+ """
+
+ @util.positional(5)
+ def __init__(
+ self,
+ type,
+ id,
+ token,
+ address,
+ expiration=None,
+ params=None,
+ resource_id="",
+ resource_uri="",
+ ):
+ """Create a new Channel.
+
+ In user code, this Channel constructor will not typically be called
+ manually since there are functions for creating channels for each specific
+ type with a more customized set of arguments to pass.
+
+ Args:
+ type: str, The type of delivery mechanism used by this channel. For
+ example, 'web_hook'.
+ id: str, A UUID for the channel.
+ token: str, An arbitrary string associated with the channel that
+ is delivered to the target address with each event delivered
+ over this channel.
+ address: str, The address of the receiving entity where events are
+ delivered. Specific to the channel type.
+ expiration: int, The time, in milliseconds from the epoch, when this
+ channel will expire.
+ params: dict, A dictionary of string to string, with additional parameters
+ controlling delivery channel behavior.
+ resource_id: str, An opaque id that identifies the resource that is
+ being watched. Stable across different API versions.
+ resource_uri: str, The canonicalized ID of the watched resource.
+ """
+ self.type = type
+ self.id = id
+ self.token = token
+ self.address = address
+ self.expiration = expiration
+ self.params = params
+ self.resource_id = resource_id
+ self.resource_uri = resource_uri
+
+ def body(self):
+ """Build a body from the Channel.
+
+ Constructs a dictionary that's appropriate for passing into watch()
+ methods as the value of the body argument.
+
+ Returns:
+ A dictionary representation of the channel.
+ """
+ result = {
+ "id": self.id,
+ "token": self.token,
+ "type": self.type,
+ "address": self.address,
+ }
+ if self.params:
+ result["params"] = self.params
+ if self.resource_id:
+ result["resourceId"] = self.resource_id
+ if self.resource_uri:
+ result["resourceUri"] = self.resource_uri
+ if self.expiration:
+ result["expiration"] = self.expiration
+
+ return result
+
+ def update(self, resp):
+ """Update a channel with information from the response of watch().
+
+ When a request is sent to watch() a resource, the response returned
+ from the watch() request is a dictionary with updated channel information,
+ such as the resource_id, which is needed when stopping a subscription.
+
+ Args:
+ resp: dict, The response from a watch() method.
+ """
+ for json_name, param_name in CHANNEL_PARAMS.items():
+ value = resp.get(json_name)
+ if value is not None:
+ setattr(self, param_name, value)
+
+
+def notification_from_headers(channel, headers):
+ """Parse a notification from the webhook request headers, validate
+ the notification, and return a Notification object.
+
+ Args:
+ channel: Channel, The channel that the notification is associated with.
+ headers: dict, A dictionary like object that contains the request headers
+ from the webhook HTTP request.
+
+ Returns:
+ A Notification object.
+
+ Raises:
+ errors.InvalidNotificationError if the notification is invalid.
+ ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.
+ """
+ headers = _upper_header_keys(headers)
+ channel_id = headers[X_GOOG_CHANNEL_ID]
+ if channel.id != channel_id:
+ raise errors.InvalidNotificationError(
+ "Channel id mismatch: %s != %s" % (channel.id, channel_id)
+ )
+ else:
+ message_number = int(headers[X_GOOG_MESSAGE_NUMBER])
+ state = headers[X_GOOG_RESOURCE_STATE]
+ resource_uri = headers[X_GOOG_RESOURCE_URI]
+ resource_id = headers[X_GOOG_RESOURCE_ID]
+ return Notification(message_number, state, resource_uri, resource_id)
+
+
+@util.positional(2)
+def new_webhook_channel(url, token=None, expiration=None, params=None):
+ """Create a new webhook Channel.
+
+ Args:
+ url: str, URL to post notifications to.
+ token: str, An arbitrary string associated with the channel that
+ is delivered to the target address with each notification delivered
+ over this channel.
+ expiration: datetime.datetime, A time in the future when the channel
+ should expire. Can also be None if the subscription should use the
+ default expiration. Note that different services may have different
+ limits on how long a subscription lasts. Check the response from the
+ watch() method to see the value the service has set for an expiration
+ time.
+ params: dict, Extra parameters to pass on channel creation. Currently
+ not used for webhook channels.
+ """
+ expiration_ms = 0
+ if expiration:
+ delta = expiration - EPOCH
+ expiration_ms = (
+ delta.microseconds / 1000 + (delta.seconds + delta.days * 24 * 3600) * 1000
+ )
+ if expiration_ms < 0:
+ expiration_ms = 0
+
+ return Channel(
+ "web_hook",
+ str(uuid.uuid4()),
+ token,
+ url,
+ expiration=expiration_ms,
+ params=params,
+ )
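+
+# Editorial sketch (not part of the upstream module): creating a webhook
+# channel that expires in one hour; the URL is a placeholder.
+def _example_channel_body():
+    expiration = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
+    channel = new_webhook_channel(
+        "https://example.com/my_web_hook", expiration=expiration
+    )
+    # The body dict is what gets passed to a service's watch() method.
+    return channel.body()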
diff --git a/Lib/site-packages/googleapiclient/discovery.py b/Lib/site-packages/googleapiclient/discovery.py
new file mode 100644
index 0000000..d137b2e
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/discovery.py
@@ -0,0 +1,1582 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for discovery based APIs.
+
+A client library for Google's discovery based APIs.
+"""
+from __future__ import absolute_import
+
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+__all__ = ["build", "build_from_document", "fix_method_name", "key2param"]
+
+from collections import OrderedDict
+import collections.abc
+
+# Standard library imports
+import copy
+from email.generator import BytesGenerator
+from email.mime.multipart import MIMEMultipart
+from email.mime.nonmultipart import MIMENonMultipart
+import http.client as http_client
+import io
+import json
+import keyword
+import logging
+import mimetypes
+import os
+import re
+import urllib
+
+import google.api_core.client_options
+from google.auth.exceptions import MutualTLSChannelError
+from google.auth.transport import mtls
+from google.oauth2 import service_account
+
+# Third-party imports
+import httplib2
+import uritemplate
+
+try:
+ import google_auth_httplib2
+except ImportError: # pragma: NO COVER
+ google_auth_httplib2 = None
+
+# Local imports
+from googleapiclient import _auth, mimeparse
+from googleapiclient._helpers import _add_query_parameter, positional
+from googleapiclient.errors import (
+ HttpError,
+ InvalidJsonError,
+ MediaUploadSizeError,
+ UnacceptableMimeTypeError,
+ UnknownApiNameOrVersion,
+ UnknownFileType,
+)
+from googleapiclient.http import (
+ BatchHttpRequest,
+ HttpMock,
+ HttpMockSequence,
+ HttpRequest,
+ MediaFileUpload,
+ MediaUpload,
+ build_http,
+)
+from googleapiclient.model import JsonModel, MediaModel, RawModel
+from googleapiclient.schema import Schemas
+
+# The client library requires a version of httplib2 that supports RETRIES.
+httplib2.RETRIES = 1
+
+logger = logging.getLogger(__name__)
+
+URITEMPLATE = re.compile("{[^}]*}")
+VARNAME = re.compile("[a-zA-Z0-9_-]+")
+DISCOVERY_URI = (
+ "https://www.googleapis.com/discovery/v1/apis/" "{api}/{apiVersion}/rest"
+)
+V1_DISCOVERY_URI = DISCOVERY_URI
+V2_DISCOVERY_URI = (
+ "https://{api}.googleapis.com/$discovery/rest?" "version={apiVersion}"
+)
+DEFAULT_METHOD_DOC = "A description of how to use this function"
+HTTP_PAYLOAD_METHODS = frozenset(["PUT", "POST", "PATCH"])
+
+_MEDIA_SIZE_BIT_SHIFTS = {"KB": 10, "MB": 20, "GB": 30, "TB": 40}
+BODY_PARAMETER_DEFAULT_VALUE = {"description": "The request body.", "type": "object"}
+MEDIA_BODY_PARAMETER_DEFAULT_VALUE = {
+ "description": (
+ "The filename of the media request body, or an instance "
+ "of a MediaUpload object."
+ ),
+ "type": "string",
+ "required": False,
+}
+MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE = {
+ "description": (
+ "The MIME type of the media request body, or an instance "
+ "of a MediaUpload object."
+ ),
+ "type": "string",
+ "required": False,
+}
+_PAGE_TOKEN_NAMES = ("pageToken", "nextPageToken")
+
+# Parameters controlling mTLS behavior. See https://google.aip.dev/auth/4114.
+GOOGLE_API_USE_CLIENT_CERTIFICATE = "GOOGLE_API_USE_CLIENT_CERTIFICATE"
+GOOGLE_API_USE_MTLS_ENDPOINT = "GOOGLE_API_USE_MTLS_ENDPOINT"
+
+# Parameters accepted by the stack, but not visible via discovery.
+# TODO(dhermes): Remove 'userip' in 'v2'.
+STACK_QUERY_PARAMETERS = frozenset(["trace", "pp", "userip", "strict"])
+STACK_QUERY_PARAMETER_DEFAULT_VALUE = {"type": "string", "location": "query"}
+
+# Library-specific reserved words beyond Python keywords.
+RESERVED_WORDS = frozenset(["body"])
+
+# patch _write_lines to avoid munging '\r' into '\n'
+# ( https://bugs.python.org/issue18886 https://bugs.python.org/issue19003 )
+class _BytesGenerator(BytesGenerator):
+ _write_lines = BytesGenerator.write
+
+
+def fix_method_name(name):
+ """Fix method names to avoid '$' characters and reserved word conflicts.
+
+ Args:
+ name: string, method name.
+
+ Returns:
+ The name with '_' appended if the name is a reserved word and '$' and '-'
+ replaced with '_'.
+ """
+ name = name.replace("$", "_").replace("-", "_")
+ if keyword.iskeyword(name) or name in RESERVED_WORDS:
+ return name + "_"
+ else:
+ return name
+
+
+def key2param(key):
+ """Converts key names into parameter names.
+
+ For example, converting "max-results" -> "max_results"
+
+ Args:
+ key: string, the method key name.
+
+ Returns:
+ A safe method name based on the key name.
+ """
+ result = []
+ key = list(key)
+ if not key[0].isalpha():
+ result.append("x")
+ for c in key:
+ if c.isalnum():
+ result.append(c)
+ else:
+ result.append("_")
+
+ return "".join(result)
+
+
+@positional(2)
+def build(
+ serviceName,
+ version,
+ http=None,
+ discoveryServiceUrl=None,
+ developerKey=None,
+ model=None,
+ requestBuilder=HttpRequest,
+ credentials=None,
+ cache_discovery=True,
+ cache=None,
+ client_options=None,
+ adc_cert_path=None,
+ adc_key_path=None,
+ num_retries=1,
+ static_discovery=None,
+ always_use_jwt_access=False,
+):
+ """Construct a Resource for interacting with an API.
+
+ Construct a Resource object for interacting with an API. The serviceName and
+ version are the names from the Discovery service.
+
+ Args:
+ serviceName: string, name of the service.
+ version: string, the version of the service.
+ http: httplib2.Http, An instance of httplib2.Http or something that acts
+ like it that HTTP requests will be made through.
+ discoveryServiceUrl: string, a URI Template that points to the location of
+ the discovery service. It should have two parameters {api} and
+ {apiVersion} that when filled in produce an absolute URI to the discovery
+ document for that service.
+ developerKey: string, key obtained from
+ https://code.google.com/apis/console.
+ model: googleapiclient.Model, converts to and from the wire format.
+ requestBuilder: googleapiclient.http.HttpRequest, encapsulator for an HTTP
+ request.
+ credentials: oauth2client.Credentials or
+ google.auth.credentials.Credentials, credentials to be used for
+ authentication.
+ cache_discovery: Boolean, whether or not to cache the discovery doc.
+ cache: googleapiclient.discovery_cache.base.CacheBase, an optional
+ cache object for the discovery documents.
+ client_options: Mapping object or google.api_core.client_options, client
+ options to set user options on the client.
+ (1) The API endpoint should be set through client_options. If API endpoint
+ is not set, `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable can be used
+ to control which endpoint to use.
+ (2) client_cert_source is not supported, client cert should be provided using
+ client_encrypted_cert_source instead. In order to use the provided client
+ cert, `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be
+ set to `true`.
+ More details on the environment variables are here:
+ https://google.aip.dev/auth/4114
+ adc_cert_path: str, client certificate file path to save the application
+ default client certificate for mTLS. This field is required if you want to
+ use the default client certificate. `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable must be set to `true` in order to use this field,
+ otherwise this field does nothing.
+ More details on the environment variables are here:
+ https://google.aip.dev/auth/4114
+ adc_key_path: str, client encrypted private key file path to save the
+ application default client encrypted private key for mTLS. This field is
+ required if you want to use the default client certificate.
+ `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be set to
+ `true` in order to use this field, otherwise this field does nothing.
+ More details on the environment variables are here:
+ https://google.aip.dev/auth/4114
+ num_retries: Integer, number of times to retry discovery with
+ randomized exponential backoff in case of intermittent/connection issues.
+ static_discovery: Boolean, whether or not to use the static discovery docs
+ included in the library. The default value for `static_discovery` depends
+ on the value of `discoveryServiceUrl`. `static_discovery` will default to
+ `True` when `discoveryServiceUrl` is also not provided, otherwise it will
+ default to `False`.
+ always_use_jwt_access: Boolean, whether always use self signed JWT for service
+ account credentials. This only applies to
+ google.oauth2.service_account.Credentials.
+
+ Returns:
+ A Resource object with methods for interacting with the service.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: if there are any problems
+ setting up mutual TLS channel.
+ """
+ params = {"api": serviceName, "apiVersion": version}
+
+ # The default value for `static_discovery` depends on the value of
+ # `discoveryServiceUrl`. `static_discovery` will default to `True` when
+ # `discoveryServiceUrl` is also not provided, otherwise it will default to
+ # `False`. This is added for backwards compatibility with
+ # google-api-python-client 1.x which does not support the `static_discovery`
+ # parameter.
+ if static_discovery is None:
+ if discoveryServiceUrl is None:
+ static_discovery = True
+ else:
+ static_discovery = False
+
+ if http is None:
+ discovery_http = build_http()
+ else:
+ discovery_http = http
+
+ service = None
+
+ for discovery_url in _discovery_service_uri_options(discoveryServiceUrl, version):
+ requested_url = uritemplate.expand(discovery_url, params)
+
+ try:
+ content = _retrieve_discovery_doc(
+ requested_url,
+ discovery_http,
+ cache_discovery,
+ serviceName,
+ version,
+ cache,
+ developerKey,
+ num_retries=num_retries,
+ static_discovery=static_discovery,
+ )
+ service = build_from_document(
+ content,
+ base=discovery_url,
+ http=http,
+ developerKey=developerKey,
+ model=model,
+ requestBuilder=requestBuilder,
+ credentials=credentials,
+ client_options=client_options,
+ adc_cert_path=adc_cert_path,
+ adc_key_path=adc_key_path,
+ always_use_jwt_access=always_use_jwt_access,
+ )
+ break # exit if a service was created
+ except HttpError as e:
+ if e.resp.status == http_client.NOT_FOUND:
+ continue
+ else:
+ raise e
+
+ # If discovery_http was created by this function, we are done with it
+ # and can safely close it
+ if http is None:
+ discovery_http.close()
+
+ if service is None:
+ raise UnknownApiNameOrVersion("name: %s version: %s" % (serviceName, version))
+ else:
+ return service
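+
+# Editorial sketch (not part of the upstream module): a typical call, assuming
+# Application Default Credentials are available; "drive"/"v3" are examples.
+#
+#   service = build("drive", "v3")
+#   files = service.files().list(pageSize=10).execute()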
+
+
+def _discovery_service_uri_options(discoveryServiceUrl, version):
+ """
+ Returns Discovery URIs to be used for attempting to build the API Resource.
+
+ Args:
+ discoveryServiceUrl:
+ string, the Original Discovery Service URL preferred by the customer.
+ version:
+ string, API Version requested
+
+ Returns:
+ A list of URIs to be tried for the Service Discovery, in order.
+ """
+
+ if discoveryServiceUrl is not None:
+ return [discoveryServiceUrl]
+ if version is None:
+ # V1 Discovery won't work if the requested version is None
+ logger.warning(
+ "Discovery V1 does not support empty versions. Defaulting to V2..."
+ )
+ return [V2_DISCOVERY_URI]
+ else:
+ return [DISCOVERY_URI, V2_DISCOVERY_URI]
+
+
+def _retrieve_discovery_doc(
+ url,
+ http,
+ cache_discovery,
+ serviceName,
+ version,
+ cache=None,
+ developerKey=None,
+ num_retries=1,
+ static_discovery=True,
+):
+ """Retrieves the discovery_doc from cache or the internet.
+
+ Args:
+ url: string, the URL of the discovery document.
+ http: httplib2.Http, An instance of httplib2.Http or something that acts
+ like it through which HTTP requests will be made.
+ cache_discovery: Boolean, whether or not to cache the discovery doc.
+ serviceName: string, name of the service.
+ version: string, the version of the service.
+ cache: googleapiclient.discovery_cache.base.Cache, an optional cache
+ object for the discovery documents.
+ developerKey: string, Key for controlling API usage, generated
+ from the API Console.
+ num_retries: Integer, number of times to retry discovery with
+ randomized exponential backoff in case of intermittent/connection issues.
+ static_discovery: Boolean, whether or not to use the static discovery docs
+ included in the library.
+
+ Returns:
+ A unicode string representation of the discovery document.
+ """
+ from . import discovery_cache
+
+ if cache_discovery:
+ if cache is None:
+ cache = discovery_cache.autodetect()
+ if cache:
+ content = cache.get(url)
+ if content:
+ return content
+
+ # When `static_discovery=True`, use static discovery artifacts included
+ # with the library
+ if static_discovery:
+ content = discovery_cache.get_static_doc(serviceName, version)
+ if content:
+ return content
+ else:
+ raise UnknownApiNameOrVersion(
+ "name: %s version: %s" % (serviceName, version)
+ )
+
+ actual_url = url
+ # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
+ # variable that contains the network address of the client sending the
+ # request. If it exists then add that to the request for the discovery
+ # document to avoid exceeding the quota on discovery requests.
+ if "REMOTE_ADDR" in os.environ:
+ actual_url = _add_query_parameter(url, "userIp", os.environ["REMOTE_ADDR"])
+ if developerKey:
+ actual_url = _add_query_parameter(url, "key", developerKey)
+ logger.debug("URL being requested: GET %s", actual_url)
+
+ # Execute this request with retries built into HttpRequest
+ # Note that it will already raise an error if we don't get a 2xx response
+ req = HttpRequest(http, HttpRequest.null_postproc, actual_url)
+ resp, content = req.execute(num_retries=num_retries)
+
+ try:
+ content = content.decode("utf-8")
+ except AttributeError:
+ pass
+
+ try:
+ service = json.loads(content)
+ except ValueError as e:
+ logger.error("Failed to parse as JSON: " + content)
+ raise InvalidJsonError()
+ if cache_discovery and cache:
+ cache.set(url, content)
+ return content
+
+
+@positional(1)
+def build_from_document(
+ service,
+ base=None,
+ future=None,
+ http=None,
+ developerKey=None,
+ model=None,
+ requestBuilder=HttpRequest,
+ credentials=None,
+ client_options=None,
+ adc_cert_path=None,
+ adc_key_path=None,
+ always_use_jwt_access=False,
+):
+ """Create a Resource for interacting with an API.
+
+ Same as `build()`, but constructs the Resource object from a discovery
+ document that it is given, as opposed to retrieving one over HTTP.
+
+ Args:
+ service: string or object, the JSON discovery document describing the API.
+ The value passed in may either be the JSON string or the deserialized
+ JSON.
+ base: string, base URI for all HTTP requests, usually the discovery URI.
+ This parameter is no longer used as rootUrl and servicePath are included
+ within the discovery document. (deprecated)
+ future: string, discovery document with future capabilities (deprecated).
+ http: httplib2.Http, An instance of httplib2.Http or something that acts
+ like it that HTTP requests will be made through.
+ developerKey: string, Key for controlling API usage, generated
+ from the API Console.
+ model: Model class instance that serializes and de-serializes requests and
+ responses.
+ requestBuilder: Takes an http request and packages it up to be executed.
+ credentials: oauth2client.Credentials or
+ google.auth.credentials.Credentials, credentials to be used for
+ authentication.
+ client_options: Mapping object or google.api_core.client_options, client
+ options to set user options on the client.
+ (1) The API endpoint should be set through client_options. If API endpoint
+ is not set, `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable can be used
+ to control which endpoint to use.
+ (2) client_cert_source is not supported, client cert should be provided using
+ client_encrypted_cert_source instead. In order to use the provided client
+ cert, `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be
+ set to `true`.
+ More details on the environment variables are here:
+ https://google.aip.dev/auth/4114
+ adc_cert_path: str, client certificate file path to save the application
+ default client certificate for mTLS. This field is required if you want to
+ use the default client certificate. `GOOGLE_API_USE_CLIENT_CERTIFICATE`
+ environment variable must be set to `true` in order to use this field,
+ otherwise this field does nothing.
+ More details on the environment variables are here:
+ https://google.aip.dev/auth/4114
+ adc_key_path: str, client encrypted private key file path to save the
+ application default client encrypted private key for mTLS. This field is
+ required if you want to use the default client certificate.
+ `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be set to
+ `true` in order to use this field, otherwise this field does nothing.
+ More details on the environment variables are here:
+ https://google.aip.dev/auth/4114
+ always_use_jwt_access: Boolean, whether always use self signed JWT for service
+ account credentials. This only applies to
+ google.oauth2.service_account.Credentials.
+
+ Returns:
+ A Resource object with methods for interacting with the service.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: if there are any problems
+ setting up mutual TLS channel.
+ """
+
+ if client_options is None:
+ client_options = google.api_core.client_options.ClientOptions()
+ if isinstance(client_options, collections.abc.Mapping):
+ client_options = google.api_core.client_options.from_dict(client_options)
+
+ if http is not None:
+ # if http is passed, the user cannot provide credentials
+ banned_options = [
+ (credentials, "credentials"),
+ (client_options.credentials_file, "client_options.credentials_file"),
+ ]
+ for option, name in banned_options:
+ if option is not None:
+ raise ValueError(
+ "Arguments http and {} are mutually exclusive".format(name)
+ )
+
+ if isinstance(service, str):
+ service = json.loads(service)
+ elif isinstance(service, bytes):
+ service = json.loads(service.decode("utf-8"))
+
+ if "rootUrl" not in service and isinstance(http, (HttpMock, HttpMockSequence)):
+ logger.error(
+ "You are using HttpMock or HttpMockSequence without"
+ + "having the service discovery doc in cache. Try calling "
+ + "build() without mocking once first to populate the "
+ + "cache."
+ )
+ raise InvalidJsonError()
+
+ # If an API Endpoint is provided on client options, use that as the base URL
+ base = urllib.parse.urljoin(service["rootUrl"], service["servicePath"])
+ audience_for_self_signed_jwt = base
+ if client_options.api_endpoint:
+ base = client_options.api_endpoint
+
+ schema = Schemas(service)
+
+ # If the http client is not specified, then we must construct an http client
+ # to make requests. If the service has scopes, then we also need to setup
+ # authentication.
+ if http is None:
+ # Does the service require scopes?
+ scopes = list(
+ service.get("auth", {}).get("oauth2", {}).get("scopes", {}).keys()
+ )
+
+ # If so, then we need to set up authentication if no developerKey is
+ # specified.
+ if scopes and not developerKey:
+ # Make sure the user didn't pass multiple credentials
+ if client_options.credentials_file and credentials:
+ raise google.api_core.exceptions.DuplicateCredentialArgs(
+ "client_options.credentials_file and credentials are mutually exclusive."
+ )
+ # Check for credentials file via client options
+ if client_options.credentials_file:
+ credentials = _auth.credentials_from_file(
+ client_options.credentials_file,
+ scopes=client_options.scopes,
+ quota_project_id=client_options.quota_project_id,
+ )
+ # If the user didn't pass in credentials, attempt to acquire application
+ # default credentials.
+ if credentials is None:
+ credentials = _auth.default_credentials(
+ scopes=client_options.scopes,
+ quota_project_id=client_options.quota_project_id,
+ )
+
+ # The credentials need to be scoped.
+ # If the user provided scopes via client_options don't override them
+ if not client_options.scopes:
+ credentials = _auth.with_scopes(credentials, scopes)
+
+ # For google-auth service account credentials, enable self signed JWT if
+ # always_use_jwt_access is true.
+ if (
+ credentials
+ and isinstance(credentials, service_account.Credentials)
+ and always_use_jwt_access
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(always_use_jwt_access)
+ credentials._create_self_signed_jwt(audience_for_self_signed_jwt)
+
+ # If credentials are provided, create an authorized http instance;
+ # otherwise, skip authentication.
+ if credentials:
+ http = _auth.authorized_http(credentials)
+
+ # If the service doesn't require scopes then there is no need for
+ # authentication.
+ else:
+ http = build_http()
+
+ # Obtain client cert and create mTLS http channel if cert exists.
+ client_cert_to_use = None
+ use_client_cert = os.getenv(GOOGLE_API_USE_CLIENT_CERTIFICATE, "false")
+ if use_client_cert not in ("true", "false"):
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_CLIENT_CERTIFICATE value. Accepted values: true, false"
+ )
+ if client_options and client_options.client_cert_source:
+ raise MutualTLSChannelError(
+ "ClientOptions.client_cert_source is not supported, please use ClientOptions.client_encrypted_cert_source."
+ )
+ if use_client_cert == "true":
+ if (
+ client_options
+ and hasattr(client_options, "client_encrypted_cert_source")
+ and client_options.client_encrypted_cert_source
+ ):
+ client_cert_to_use = client_options.client_encrypted_cert_source
+ elif (
+ adc_cert_path and adc_key_path and mtls.has_default_client_cert_source()
+ ):
+ client_cert_to_use = mtls.default_client_encrypted_cert_source(
+ adc_cert_path, adc_key_path
+ )
+ if client_cert_to_use:
+ cert_path, key_path, passphrase = client_cert_to_use()
+
+ # The http object we built could be google_auth_httplib2.AuthorizedHttp
+ # or httplib2.Http. In the first case we need to extract the wrapped
+ # httplib2.Http object from google_auth_httplib2.AuthorizedHttp.
+ http_channel = (
+ http.http
+ if google_auth_httplib2
+ and isinstance(http, google_auth_httplib2.AuthorizedHttp)
+ else http
+ )
+ http_channel.add_certificate(key_path, cert_path, "", passphrase)
+
+ # If user doesn't provide api endpoint via client options, decide which
+ # api endpoint to use.
+ if "mtlsRootUrl" in service and (
+ not client_options or not client_options.api_endpoint
+ ):
+ mtls_endpoint = urllib.parse.urljoin(
+ service["mtlsRootUrl"], service["servicePath"]
+ )
+ use_mtls_endpoint = os.getenv(GOOGLE_API_USE_MTLS_ENDPOINT, "auto")
+
+ if use_mtls_endpoint not in ("never", "auto", "always"):
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
+ )
+
+ # Switch to mTLS endpoint, if environment variable is "always", or
+ # environment variable is "auto" and client cert exists.
+ if use_mtls_endpoint == "always" or (
+ use_mtls_endpoint == "auto" and client_cert_to_use
+ ):
+ base = mtls_endpoint
+
+ if model is None:
+ features = service.get("features", [])
+ model = JsonModel("dataWrapper" in features)
+
+ return Resource(
+ http=http,
+ baseUrl=base,
+ model=model,
+ developerKey=developerKey,
+ requestBuilder=requestBuilder,
+ resourceDesc=service,
+ rootDesc=service,
+ schema=schema,
+ )
+
+
+def _cast(value, schema_type):
+ """Convert value to a string based on JSON Schema type.
+
+ See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
+ JSON Schema.
+
+ Args:
+ value: any, the value to convert
+ schema_type: string, the type that value should be interpreted as
+
+ Returns:
+ A string representation of 'value' based on the schema_type.
+ """
+ if schema_type == "string":
+ if isinstance(value, str):
+ return value
+ else:
+ return str(value)
+ elif schema_type == "integer":
+ return str(int(value))
+ elif schema_type == "number":
+ return str(float(value))
+ elif schema_type == "boolean":
+ return str(bool(value)).lower()
+ else:
+ if isinstance(value, str):
+ return value
+ else:
+ return str(value)
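+
+# Editorial note (not part of the upstream module): expected behavior of
+# _cast:
+#
+#   _cast(7, "string")     -> "7"
+#   _cast("3", "integer")  -> "3"
+#   _cast(True, "boolean") -> "true"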
+
+
+def _media_size_to_long(maxSize):
+ """Convert a string media size, such as 10GB or 3TB into an integer.
+
+ Args:
+ maxSize: string, size as a string, such as 2MB or 7GB.
+
+ Returns:
+ The size as an integer value.
+ """
+ if len(maxSize) < 2:
+ return 0
+ units = maxSize[-2:].upper()
+ bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
+ if bit_shift is not None:
+ return int(maxSize[:-2]) << bit_shift
+ else:
+ return int(maxSize)
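+
+# Editorial note (not part of the upstream module): expected behavior of
+# _media_size_to_long:
+#
+#   _media_size_to_long("10GB") -> 10 << 30  (10737418240)
+#   _media_size_to_long("500")  -> 500
+#   _media_size_to_long("")     -> 0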
+
+
+def _media_path_url_from_info(root_desc, path_url):
+ """Creates an absolute media path URL.
+
+ Constructed using the API root URI and service path from the discovery
+ document and the relative path for the API method.
+
+ Args:
+ root_desc: Dictionary; the entire original deserialized discovery document.
+ path_url: String; the relative URL for the API method. Relative to the API
+ root, which is specified in the discovery document.
+
+ Returns:
+ String; the absolute URI for media upload for the API method.
+ """
+ return "%(root)supload/%(service_path)s%(path)s" % {
+ "root": root_desc["rootUrl"],
+ "service_path": root_desc["servicePath"],
+ "path": path_url,
+ }
+
+
+def _fix_up_parameters(method_desc, root_desc, http_method, schema):
+ """Updates parameters of an API method with values specific to this library.
+
+ Specifically, adds whatever global parameters are specified by the API to the
+ parameters for the individual method. Also adds parameters which don't
+ appear in the discovery document, but are available to all discovery based
+ APIs (these are listed in STACK_QUERY_PARAMETERS).
+
+ SIDE EFFECTS: This updates the parameters dictionary object in the method
+ description.
+
+ Args:
+ method_desc: Dictionary with metadata describing an API method. Value comes
+ from the dictionary of methods stored in the 'methods' key in the
+ deserialized discovery document.
+ root_desc: Dictionary; the entire original deserialized discovery document.
+ http_method: String; the HTTP method used to call the API method described
+ in method_desc.
+ schema: Object, mapping of schema names to schema descriptions.
+
+ Returns:
+ The updated Dictionary stored in the 'parameters' key of the method
+ description dictionary.
+ """
+ parameters = method_desc.setdefault("parameters", {})
+
+ # Add in the parameters common to all methods.
+ for name, description in root_desc.get("parameters", {}).items():
+ parameters[name] = description
+
+ # Add in undocumented query parameters.
+ for name in STACK_QUERY_PARAMETERS:
+ parameters[name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy()
+
+ # Add 'body' (our own reserved word) to parameters if the method supports
+ # a request payload.
+ if http_method in HTTP_PAYLOAD_METHODS and "request" in method_desc:
+ body = BODY_PARAMETER_DEFAULT_VALUE.copy()
+ body.update(method_desc["request"])
+ parameters["body"] = body
+
+ return parameters
+
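+# Sketch of the side effect on a hypothetical method description:
+#   method_desc = {"parameters": {}, "request": {"$ref": "File"}}
+#   _fix_up_parameters(method_desc, root_desc, "POST", schema)
+# afterwards method_desc["parameters"] also holds the API-wide parameters from
+# root_desc, the STACK_QUERY_PARAMETERS entries, and a synthetic "body" entry.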
+
+def _fix_up_media_upload(method_desc, root_desc, path_url, parameters):
+ """Adds 'media_body' and 'media_mime_type' parameters if supported by method.
+
+ SIDE EFFECTS: If there is a 'mediaUpload' in the method description, adds
+ 'media_upload' key to parameters.
+
+ Args:
+ method_desc: Dictionary with metadata describing an API method. Value comes
+ from the dictionary of methods stored in the 'methods' key in the
+ deserialized discovery document.
+ root_desc: Dictionary; the entire original deserialized discovery document.
+ path_url: String; the relative URL for the API method. Relative to the API
+ root, which is specified in the discovery document.
+ parameters: A dictionary describing method parameters for method described
+ in method_desc.
+
+ Returns:
+ Triple (accept, max_size, media_path_url) where:
+ - accept is a list of strings representing what content types are
+ accepted for media upload. Defaults to empty list if not in the
+ discovery document.
+ - max_size is a long representing the max size in bytes allowed for a
+ media upload. Defaults to 0L if not in the discovery document.
+ - media_path_url is a String; the absolute URI for media upload for the
+ API method. Constructed using the API root URI and service path from
+ the discovery document and the relative path for the API method. If
+ media upload is not supported, this is None.
+ """
+ media_upload = method_desc.get("mediaUpload", {})
+ accept = media_upload.get("accept", [])
+ max_size = _media_size_to_long(media_upload.get("maxSize", ""))
+ media_path_url = None
+
+ if media_upload:
+ media_path_url = _media_path_url_from_info(root_desc, path_url)
+ parameters["media_body"] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy()
+ parameters["media_mime_type"] = MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE.copy()
+
+ return accept, max_size, media_path_url
+
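+# Sketch of the returned triple for a hypothetical upload-capable method:
+#   method_desc = {"mediaUpload": {"accept": ["image/*"], "maxSize": "10MB"}}
+#   _fix_up_media_upload(method_desc, root_desc, "files/{fileId}", parameters)
+#   -> (["image/*"], 10 << 20, "<rootUrl>upload/<servicePath>files/{fileId}")
+# and parameters gains "media_body" and "media_mime_type" entries.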
+
+def _fix_up_method_description(method_desc, root_desc, schema):
+ """Updates a method description in a discovery document.
+
+ SIDE EFFECTS: Changes the parameters dictionary in the method description with
+ extra parameters which are used locally.
+
+ Args:
+ method_desc: Dictionary with metadata describing an API method. Value comes
+ from the dictionary of methods stored in the 'methods' key in the
+ deserialized discovery document.
+ root_desc: Dictionary; the entire original deserialized discovery document.
+ schema: Object, mapping of schema names to schema descriptions.
+
+ Returns:
+ Tuple (path_url, http_method, method_id, accept, max_size, media_path_url)
+ where:
+ - path_url is a String; the relative URL for the API method. Relative to
+ the API root, which is specified in the discovery document.
+ - http_method is a String; the HTTP method used to call the API method
+ described in the method description.
+ - method_id is a String; the name of the RPC method associated with the
+ API method, and is in the method description in the 'id' key.
+ - accept is a list of strings representing what content types are
+ accepted for media upload. Defaults to empty list if not in the
+ discovery document.
+ - max_size is a long representing the max size in bytes allowed for a
+ media upload. Defaults to 0L if not in the discovery document.
+ - media_path_url is a String; the absolute URI for media upload for the
+ API method. Constructed using the API root URI and service path from
+ the discovery document and the relative path for the API method. If
+ media upload is not supported, this is None.
+ """
+ path_url = method_desc["path"]
+ http_method = method_desc["httpMethod"]
+ method_id = method_desc["id"]
+
+ parameters = _fix_up_parameters(method_desc, root_desc, http_method, schema)
+ # Order is important. `_fix_up_media_upload` needs `method_desc` to have a
+ # 'parameters' key and needs to know if there is a 'body' parameter because it
+ # also sets a 'media_body' parameter.
+ accept, max_size, media_path_url = _fix_up_media_upload(
+ method_desc, root_desc, path_url, parameters
+ )
+
+ return path_url, http_method, method_id, accept, max_size, media_path_url
+
+
+def _fix_up_media_path_base_url(media_path_url, base_url):
+ """
+    Update the media upload base URL if its netloc doesn't match the base URL netloc.
+
+ This can happen in case the base url was overridden by
+ client_options.api_endpoint.
+
+ Args:
+ media_path_url: String; the absolute URI for media upload.
+ base_url: string, base URL for the API. All requests are relative to this URI.
+
+ Returns:
+ String; the absolute URI for media upload.
+ """
+ parsed_media_url = urllib.parse.urlparse(media_path_url)
+ parsed_base_url = urllib.parse.urlparse(base_url)
+ if parsed_media_url.netloc == parsed_base_url.netloc:
+ return media_path_url
+ return urllib.parse.urlunparse(
+ parsed_media_url._replace(netloc=parsed_base_url.netloc)
+ )
+
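+# For example, with a base URL overridden via client_options.api_endpoint
+# (hypothetical hosts, for illustration):
+#   _fix_up_media_path_base_url(
+#       "https://www.googleapis.com/upload/drive/v3/files",
+#       "https://custom.example.googleapis.com/")
+#   -> "https://custom.example.googleapis.com/upload/drive/v3/files"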
+
+def _urljoin(base, url):
+ """Custom urljoin replacement supporting : before / in url."""
+ # In general, it's unsafe to simply join base and url. However, for
+ # the case of discovery documents, we know:
+ # * base will never contain params, query, or fragment
+ # * url will never contain a scheme or net_loc.
+ # In general, this means we can safely join on /; we just need to
+ # ensure we end up with precisely one / joining base and url. The
+ # exception here is the case of media uploads, where url will be an
+ # absolute url.
+ if url.startswith("http://") or url.startswith("https://"):
+ return urllib.parse.urljoin(base, url)
+ new_base = base if base.endswith("/") else base + "/"
+ new_url = url[1:] if url.startswith("/") else url
+ return new_base + new_url
+
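+# Editor's sketch of the joining rules:
+#   _urljoin("https://api.example.com/base/", "items") -> ".../base/items"
+#   _urljoin("https://api.example.com/base", "/items") -> ".../base/items"
+#   _urljoin("https://api.example.com/base",
+#            "https://other.example.com/x") -> "https://other.example.com/x"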
+
+# TODO(dhermes): Convert this class to ResourceMethod and make it callable
+class ResourceMethodParameters(object):
+ """Represents the parameters associated with a method.
+
+ Attributes:
+ argmap: Map from method parameter name (string) to query parameter name
+ (string).
+ required_params: List of required parameters (represented by parameter
+ name as string).
+ repeated_params: List of repeated parameters (represented by parameter
+ name as string).
+ pattern_params: Map from method parameter name (string) to regular
+ expression (as a string). If the pattern is set for a parameter, the
+ value for that parameter must match the regular expression.
+ query_params: List of parameters (represented by parameter name as string)
+ that will be used in the query string.
+ path_params: Set of parameters (represented by parameter name as string)
+ that will be used in the base URL path.
+ param_types: Map from method parameter name (string) to parameter type. Type
+ can be any valid JSON schema type; valid values are 'any', 'array',
+ 'boolean', 'integer', 'number', 'object', or 'string'. Reference:
+ http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.1
+ enum_params: Map from method parameter name (string) to list of strings,
+ where each list of strings is the list of acceptable enum values.
+ """
+
+ def __init__(self, method_desc):
+ """Constructor for ResourceMethodParameters.
+
+ Sets default values and defers to set_parameters to populate.
+
+ Args:
+ method_desc: Dictionary with metadata describing an API method. Value
+ comes from the dictionary of methods stored in the 'methods' key in
+ the deserialized discovery document.
+ """
+ self.argmap = {}
+ self.required_params = []
+ self.repeated_params = []
+ self.pattern_params = {}
+ self.query_params = []
+ # TODO(dhermes): Change path_params to a list if the extra URITEMPLATE
+ # parsing is gotten rid of.
+ self.path_params = set()
+ self.param_types = {}
+ self.enum_params = {}
+
+ self.set_parameters(method_desc)
+
+ def set_parameters(self, method_desc):
+ """Populates maps and lists based on method description.
+
+ Iterates through each parameter for the method and parses the values from
+ the parameter dictionary.
+
+ Args:
+ method_desc: Dictionary with metadata describing an API method. Value
+ comes from the dictionary of methods stored in the 'methods' key in
+ the deserialized discovery document.
+ """
+ parameters = method_desc.get("parameters", {})
+ sorted_parameters = OrderedDict(sorted(parameters.items()))
+ for arg, desc in sorted_parameters.items():
+ param = key2param(arg)
+ self.argmap[param] = arg
+
+ if desc.get("pattern"):
+ self.pattern_params[param] = desc["pattern"]
+ if desc.get("enum"):
+ self.enum_params[param] = desc["enum"]
+ if desc.get("required"):
+ self.required_params.append(param)
+ if desc.get("repeated"):
+ self.repeated_params.append(param)
+ if desc.get("location") == "query":
+ self.query_params.append(param)
+ if desc.get("location") == "path":
+ self.path_params.add(param)
+ self.param_types[param] = desc.get("type", "string")
+
+ # TODO(dhermes): Determine if this is still necessary. Discovery based APIs
+ # should have all path parameters already marked with
+ # 'location: path'.
+ for match in URITEMPLATE.finditer(method_desc["path"]):
+ for namematch in VARNAME.finditer(match.group(0)):
+ name = key2param(namematch.group(0))
+ self.path_params.add(name)
+ if name in self.query_params:
+ self.query_params.remove(name)
+
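+# Illustration with a hypothetical discovery fragment: given
+#   method_desc = {"path": "files/{fileId}", "parameters": {
+#       "fileId": {"location": "path", "required": True, "type": "string"},
+#       "max-results": {"location": "query", "type": "integer"}}}
+# ResourceMethodParameters(method_desc) would yield
+#   argmap          {"fileId": "fileId", "max_results": "max-results"}
+#   path_params     {"fileId"}
+#   query_params    ["max_results"]
+#   required_params ["fileId"]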
+
+def createMethod(methodName, methodDesc, rootDesc, schema):
+ """Creates a method for attaching to a Resource.
+
+ Args:
+ methodName: string, name of the method to use.
+ methodDesc: object, fragment of deserialized discovery document that
+ describes the method.
+ rootDesc: object, the entire deserialized discovery document.
+ schema: object, mapping of schema names to schema descriptions.
+ """
+ methodName = fix_method_name(methodName)
+ (
+ pathUrl,
+ httpMethod,
+ methodId,
+ accept,
+ maxSize,
+ mediaPathUrl,
+ ) = _fix_up_method_description(methodDesc, rootDesc, schema)
+
+ parameters = ResourceMethodParameters(methodDesc)
+
+ def method(self, **kwargs):
+        # Don't bother with a docstring; it will be overwritten by createMethod.
+
+ for name in kwargs:
+ if name not in parameters.argmap:
+ raise TypeError("Got an unexpected keyword argument {}".format(name))
+
+ # Remove args that have a value of None.
+ keys = list(kwargs.keys())
+ for name in keys:
+ if kwargs[name] is None:
+ del kwargs[name]
+
+ for name in parameters.required_params:
+ if name not in kwargs:
+ # temporary workaround for non-paging methods incorrectly requiring
+ # page token parameter (cf. drive.changes.watch vs. drive.changes.list)
+ if name not in _PAGE_TOKEN_NAMES or _findPageTokenName(
+ _methodProperties(methodDesc, schema, "response")
+ ):
+ raise TypeError('Missing required parameter "%s"' % name)
+
+ for name, regex in parameters.pattern_params.items():
+ if name in kwargs:
+ if isinstance(kwargs[name], str):
+ pvalues = [kwargs[name]]
+ else:
+ pvalues = kwargs[name]
+ for pvalue in pvalues:
+ if re.match(regex, pvalue) is None:
+ raise TypeError(
+ 'Parameter "%s" value "%s" does not match the pattern "%s"'
+ % (name, pvalue, regex)
+ )
+
+ for name, enums in parameters.enum_params.items():
+ if name in kwargs:
+ # We need to handle the case of a repeated enum
+ # name differently, since we want to handle both
+ # arg='value' and arg=['value1', 'value2']
+ if name in parameters.repeated_params and not isinstance(
+ kwargs[name], str
+ ):
+ values = kwargs[name]
+ else:
+ values = [kwargs[name]]
+ for value in values:
+ if value not in enums:
+ raise TypeError(
+ 'Parameter "%s" value "%s" is not an allowed value in "%s"'
+ % (name, value, str(enums))
+ )
+
+ actual_query_params = {}
+ actual_path_params = {}
+ for key, value in kwargs.items():
+ to_type = parameters.param_types.get(key, "string")
+ # For repeated parameters we cast each member of the list.
+            if key in parameters.repeated_params and isinstance(value, list):
+ cast_value = [_cast(x, to_type) for x in value]
+ else:
+ cast_value = _cast(value, to_type)
+ if key in parameters.query_params:
+ actual_query_params[parameters.argmap[key]] = cast_value
+ if key in parameters.path_params:
+ actual_path_params[parameters.argmap[key]] = cast_value
+ body_value = kwargs.get("body", None)
+ media_filename = kwargs.get("media_body", None)
+ media_mime_type = kwargs.get("media_mime_type", None)
+
+ if self._developerKey:
+ actual_query_params["key"] = self._developerKey
+
+ model = self._model
+ if methodName.endswith("_media"):
+ model = MediaModel()
+ elif "response" not in methodDesc:
+ model = RawModel()
+
+ headers = {}
+ headers, params, query, body = model.request(
+ headers, actual_path_params, actual_query_params, body_value
+ )
+
+ expanded_url = uritemplate.expand(pathUrl, params)
+ url = _urljoin(self._baseUrl, expanded_url + query)
+
+ resumable = None
+ multipart_boundary = ""
+
+ if media_filename:
+ # Ensure we end up with a valid MediaUpload object.
+ if isinstance(media_filename, str):
+ if media_mime_type is None:
+ logger.warning(
+ "media_mime_type argument not specified: trying to auto-detect for %s",
+ media_filename,
+ )
+ media_mime_type, _ = mimetypes.guess_type(media_filename)
+ if media_mime_type is None:
+ raise UnknownFileType(media_filename)
+ if not mimeparse.best_match([media_mime_type], ",".join(accept)):
+ raise UnacceptableMimeTypeError(media_mime_type)
+ media_upload = MediaFileUpload(media_filename, mimetype=media_mime_type)
+ elif isinstance(media_filename, MediaUpload):
+ media_upload = media_filename
+ else:
+ raise TypeError("media_filename must be str or MediaUpload.")
+
+ # Check the maxSize
+ if media_upload.size() is not None and media_upload.size() > maxSize > 0:
+ raise MediaUploadSizeError("Media larger than: %s" % maxSize)
+
+ # Use the media path uri for media uploads
+ expanded_url = uritemplate.expand(mediaPathUrl, params)
+ url = _urljoin(self._baseUrl, expanded_url + query)
+ url = _fix_up_media_path_base_url(url, self._baseUrl)
+ if media_upload.resumable():
+ url = _add_query_parameter(url, "uploadType", "resumable")
+
+ if media_upload.resumable():
+ # This is all we need to do for resumable, if the body exists it gets
+ # sent in the first request, otherwise an empty body is sent.
+ resumable = media_upload
+ else:
+ # A non-resumable upload
+ if body is None:
+ # This is a simple media upload
+ headers["content-type"] = media_upload.mimetype()
+ body = media_upload.getbytes(0, media_upload.size())
+ url = _add_query_parameter(url, "uploadType", "media")
+ else:
+ # This is a multipart/related upload.
+ msgRoot = MIMEMultipart("related")
+                # msgRoot should not write out its own headers
+ setattr(msgRoot, "_write_headers", lambda self: None)
+
+ # attach the body as one part
+ msg = MIMENonMultipart(*headers["content-type"].split("/"))
+ msg.set_payload(body)
+ msgRoot.attach(msg)
+
+ # attach the media as the second part
+ msg = MIMENonMultipart(*media_upload.mimetype().split("/"))
+ msg["Content-Transfer-Encoding"] = "binary"
+
+ payload = media_upload.getbytes(0, media_upload.size())
+ msg.set_payload(payload)
+ msgRoot.attach(msg)
+ # encode the body: note that we can't use `as_string`, because
+ # it plays games with `From ` lines.
+ fp = io.BytesIO()
+ g = _BytesGenerator(fp, mangle_from_=False)
+ g.flatten(msgRoot, unixfrom=False)
+ body = fp.getvalue()
+
+ multipart_boundary = msgRoot.get_boundary()
+ headers["content-type"] = (
+ "multipart/related; " 'boundary="%s"'
+ ) % multipart_boundary
+ url = _add_query_parameter(url, "uploadType", "multipart")
+
+ logger.debug("URL being requested: %s %s" % (httpMethod, url))
+ return self._requestBuilder(
+ self._http,
+ model.response,
+ url,
+ method=httpMethod,
+ body=body,
+ headers=headers,
+ methodId=methodId,
+ resumable=resumable,
+ )
+
+ docs = [methodDesc.get("description", DEFAULT_METHOD_DOC), "\n\n"]
+ if len(parameters.argmap) > 0:
+ docs.append("Args:\n")
+
+ # Skip undocumented params and params common to all methods.
+ skip_parameters = list(rootDesc.get("parameters", {}).keys())
+ skip_parameters.extend(STACK_QUERY_PARAMETERS)
+
+ all_args = list(parameters.argmap.keys())
+ args_ordered = [key2param(s) for s in methodDesc.get("parameterOrder", [])]
+
+ # Move body to the front of the line.
+ if "body" in all_args:
+ args_ordered.append("body")
+
+ for name in sorted(all_args):
+ if name not in args_ordered:
+ args_ordered.append(name)
+
+ for arg in args_ordered:
+ if arg in skip_parameters:
+ continue
+
+ repeated = ""
+ if arg in parameters.repeated_params:
+ repeated = " (repeated)"
+ required = ""
+ if arg in parameters.required_params:
+ required = " (required)"
+ paramdesc = methodDesc["parameters"][parameters.argmap[arg]]
+ paramdoc = paramdesc.get("description", "A parameter")
+ if "$ref" in paramdesc:
+ docs.append(
+ (" %s: object, %s%s%s\n The object takes the form of:\n\n%s\n\n")
+ % (
+ arg,
+ paramdoc,
+ required,
+ repeated,
+ schema.prettyPrintByName(paramdesc["$ref"]),
+ )
+ )
+ else:
+ paramtype = paramdesc.get("type", "string")
+ docs.append(
+ " %s: %s, %s%s%s\n" % (arg, paramtype, paramdoc, required, repeated)
+ )
+ enum = paramdesc.get("enum", [])
+ enumDesc = paramdesc.get("enumDescriptions", [])
+ if enum and enumDesc:
+ docs.append(" Allowed values\n")
+ for (name, desc) in zip(enum, enumDesc):
+ docs.append(" %s - %s\n" % (name, desc))
+ if "response" in methodDesc:
+ if methodName.endswith("_media"):
+ docs.append("\nReturns:\n The media object as a string.\n\n ")
+ else:
+ docs.append("\nReturns:\n An object of the form:\n\n ")
+ docs.append(schema.prettyPrintSchema(methodDesc["response"]))
+
+ setattr(method, "__doc__", "".join(docs))
+ return (methodName, method)
+
+
+def createNextMethod(
+ methodName,
+ pageTokenName="pageToken",
+ nextPageTokenName="nextPageToken",
+ isPageTokenParameter=True,
+):
+ """Creates any _next methods for attaching to a Resource.
+
+ The _next methods allow for easy iteration through list() responses.
+
+ Args:
+ methodName: string, name of the method to use.
+ pageTokenName: string, name of request page token field.
+ nextPageTokenName: string, name of response page token field.
+ isPageTokenParameter: Boolean, True if request page token is a query
+ parameter, False if request page token is a field of the request body.
+ """
+ methodName = fix_method_name(methodName)
+
+ def methodNext(self, previous_request, previous_response):
+ """Retrieves the next page of results.
+
+ Args:
+ previous_request: The request for the previous page. (required)
+ previous_response: The response from the request for the previous page. (required)
+
+ Returns:
+ A request object that you can call 'execute()' on to request the next
+ page. Returns None if there are no more items in the collection.
+ """
+ # Retrieve nextPageToken from previous_response
+ # Use as pageToken in previous_request to create new request.
+
+ nextPageToken = previous_response.get(nextPageTokenName, None)
+ if not nextPageToken:
+ return None
+
+ request = copy.copy(previous_request)
+
+ if isPageTokenParameter:
+ # Replace pageToken value in URI
+ request.uri = _add_query_parameter(
+ request.uri, pageTokenName, nextPageToken
+ )
+ logger.debug("Next page request URL: %s %s" % (methodName, request.uri))
+ else:
+ # Replace pageToken value in request body
+ model = self._model
+ body = model.deserialize(request.body)
+ body[pageTokenName] = nextPageToken
+ request.body = model.serialize(body)
+ request.body_size = len(request.body)
+ if "content-length" in request.headers:
+ del request.headers["content-length"]
+ logger.debug("Next page request body: %s %s" % (methodName, body))
+
+ return request
+
+ return (methodName, methodNext)
+
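+# Typical pagination loop in application code (sketch; "service" and the
+# files() collection stand in for whatever API the resource was built for):
+#   request = service.files().list(pageSize=10)
+#   while request is not None:
+#       response = request.execute()
+#       ...  # consume response["files"]
+#       request = service.files().list_next(request, response)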
+
+class Resource(object):
+ """A class for interacting with a resource."""
+
+ def __init__(
+ self,
+ http,
+ baseUrl,
+ model,
+ requestBuilder,
+ developerKey,
+ resourceDesc,
+ rootDesc,
+ schema,
+ ):
+ """Build a Resource from the API description.
+
+ Args:
+ http: httplib2.Http, Object to make http requests with.
+ baseUrl: string, base URL for the API. All requests are relative to this
+ URI.
+ model: googleapiclient.Model, converts to and from the wire format.
+ requestBuilder: class or callable that instantiates an
+ googleapiclient.HttpRequest object.
+ developerKey: string, key obtained from
+ https://code.google.com/apis/console
+ resourceDesc: object, section of deserialized discovery document that
+ describes a resource. Note that the top level discovery document
+ is considered a resource.
+ rootDesc: object, the entire deserialized discovery document.
+ schema: object, mapping of schema names to schema descriptions.
+ """
+ self._dynamic_attrs = []
+
+ self._http = http
+ self._baseUrl = baseUrl
+ self._model = model
+ self._developerKey = developerKey
+ self._requestBuilder = requestBuilder
+ self._resourceDesc = resourceDesc
+ self._rootDesc = rootDesc
+ self._schema = schema
+
+ self._set_service_methods()
+
+ def _set_dynamic_attr(self, attr_name, value):
+ """Sets an instance attribute and tracks it in a list of dynamic attributes.
+
+ Args:
+ attr_name: string; The name of the attribute to be set
+ value: The value being set on the object and tracked in the dynamic cache.
+ """
+ self._dynamic_attrs.append(attr_name)
+ self.__dict__[attr_name] = value
+
+ def __getstate__(self):
+ """Trim the state down to something that can be pickled.
+
+ Uses the fact that the instance variable _dynamic_attrs holds attrs that
+ will be wiped and restored on pickle serialization.
+ """
+ state_dict = copy.copy(self.__dict__)
+ for dynamic_attr in self._dynamic_attrs:
+ del state_dict[dynamic_attr]
+ del state_dict["_dynamic_attrs"]
+ return state_dict
+
+ def __setstate__(self, state):
+ """Reconstitute the state of the object from being pickled.
+
+ Uses the fact that the instance variable _dynamic_attrs holds attrs that
+ will be wiped and restored on pickle serialization.
+ """
+ self.__dict__.update(state)
+ self._dynamic_attrs = []
+ self._set_service_methods()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc, exc_tb):
+ self.close()
+
+ def close(self):
+ """Close httplib2 connections."""
+ # httplib2 leaves sockets open by default.
+ # Cleanup using the `close` method.
+ # https://github.com/httplib2/httplib2/issues/148
+ self._http.close()
+
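+    # Because __enter__/__exit__ delegate to close(), callers can scope the
+    # underlying sockets with a context manager (sketch; build() comes from
+    # googleapiclient.discovery):
+    #   with build("drive", "v3") as service:
+    #       service.files().list().execute()
+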
+ def _set_service_methods(self):
+ self._add_basic_methods(self._resourceDesc, self._rootDesc, self._schema)
+ self._add_nested_resources(self._resourceDesc, self._rootDesc, self._schema)
+ self._add_next_methods(self._resourceDesc, self._schema)
+
+ def _add_basic_methods(self, resourceDesc, rootDesc, schema):
+ # If this is the root Resource, add a new_batch_http_request() method.
+ if resourceDesc == rootDesc:
+ batch_uri = "%s%s" % (
+ rootDesc["rootUrl"],
+ rootDesc.get("batchPath", "batch"),
+ )
+
+ def new_batch_http_request(callback=None):
+ """Create a BatchHttpRequest object based on the discovery document.
+
+ Args:
+ callback: callable, A callback to be called for each response, of the
+ form callback(id, response, exception). The first parameter is the
+ request id, and the second is the deserialized response object. The
+ third is an apiclient.errors.HttpError exception object if an HTTP
+ error occurred while processing the request, or None if no error
+ occurred.
+
+ Returns:
+ A BatchHttpRequest object based on the discovery document.
+ """
+ return BatchHttpRequest(callback=callback, batch_uri=batch_uri)
+
+ self._set_dynamic_attr("new_batch_http_request", new_batch_http_request)
+
+ # Add basic methods to Resource
+ if "methods" in resourceDesc:
+ for methodName, methodDesc in resourceDesc["methods"].items():
+ fixedMethodName, method = createMethod(
+ methodName, methodDesc, rootDesc, schema
+ )
+ self._set_dynamic_attr(
+ fixedMethodName, method.__get__(self, self.__class__)
+ )
+ # Add in _media methods. The functionality of the attached method will
+ # change when it sees that the method name ends in _media.
+ if methodDesc.get("supportsMediaDownload", False):
+ fixedMethodName, method = createMethod(
+ methodName + "_media", methodDesc, rootDesc, schema
+ )
+ self._set_dynamic_attr(
+ fixedMethodName, method.__get__(self, self.__class__)
+ )
+
+ def _add_nested_resources(self, resourceDesc, rootDesc, schema):
+ # Add in nested resources
+ if "resources" in resourceDesc:
+
+ def createResourceMethod(methodName, methodDesc):
+ """Create a method on the Resource to access a nested Resource.
+
+ Args:
+ methodName: string, name of the method to use.
+ methodDesc: object, fragment of deserialized discovery document that
+ describes the method.
+ """
+ methodName = fix_method_name(methodName)
+
+ def methodResource(self):
+ return Resource(
+ http=self._http,
+ baseUrl=self._baseUrl,
+ model=self._model,
+ developerKey=self._developerKey,
+ requestBuilder=self._requestBuilder,
+ resourceDesc=methodDesc,
+ rootDesc=rootDesc,
+ schema=schema,
+ )
+
+ setattr(methodResource, "__doc__", "A collection resource.")
+ setattr(methodResource, "__is_resource__", True)
+
+ return (methodName, methodResource)
+
+ for methodName, methodDesc in resourceDesc["resources"].items():
+ fixedMethodName, method = createResourceMethod(methodName, methodDesc)
+ self._set_dynamic_attr(
+ fixedMethodName, method.__get__(self, self.__class__)
+ )
+
+ def _add_next_methods(self, resourceDesc, schema):
+ # Add _next() methods if and only if one of the names 'pageToken' or
+ # 'nextPageToken' occurs among the fields of both the method's response
+        # type and either the method's request (query parameters) or request body.
+ if "methods" not in resourceDesc:
+ return
+ for methodName, methodDesc in resourceDesc["methods"].items():
+ nextPageTokenName = _findPageTokenName(
+ _methodProperties(methodDesc, schema, "response")
+ )
+ if not nextPageTokenName:
+ continue
+ isPageTokenParameter = True
+ pageTokenName = _findPageTokenName(methodDesc.get("parameters", {}))
+ if not pageTokenName:
+ isPageTokenParameter = False
+ pageTokenName = _findPageTokenName(
+ _methodProperties(methodDesc, schema, "request")
+ )
+ if not pageTokenName:
+ continue
+ fixedMethodName, method = createNextMethod(
+ methodName + "_next",
+ pageTokenName,
+ nextPageTokenName,
+ isPageTokenParameter,
+ )
+ self._set_dynamic_attr(
+ fixedMethodName, method.__get__(self, self.__class__)
+ )
+
+
+def _findPageTokenName(fields):
+ """Search field names for one like a page token.
+
+ Args:
+ fields: container of string, names of fields.
+
+ Returns:
+ First name that is either 'pageToken' or 'nextPageToken' if one exists,
+ otherwise None.
+ """
+ return next(
+ (tokenName for tokenName in _PAGE_TOKEN_NAMES if tokenName in fields), None
+ )
+
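+# For example, assuming _PAGE_TOKEN_NAMES (defined earlier in this module) is
+# ("pageToken", "nextPageToken"):
+#   _findPageTokenName({"nextPageToken": {}, "items": {}}) -> "nextPageToken"
+#   _findPageTokenName({"items": {}})                      -> None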
+
+def _methodProperties(methodDesc, schema, name):
+ """Get properties of a field in a method description.
+
+ Args:
+ methodDesc: object, fragment of deserialized discovery document that
+ describes the method.
+ schema: object, mapping of schema names to schema descriptions.
+ name: string, name of top-level field in method description.
+
+ Returns:
+ Object representing fragment of deserialized discovery document
+ corresponding to 'properties' field of object corresponding to named field
+ in method description, if it exists, otherwise empty dict.
+ """
+ desc = methodDesc.get(name, {})
+ if "$ref" in desc:
+ desc = schema.get(desc["$ref"], {})
+ return desc.get("properties", {})
diff --git a/Lib/site-packages/googleapiclient/discovery_cache/__init__.py b/Lib/site-packages/googleapiclient/discovery_cache/__init__.py
new file mode 100644
index 0000000..6051191
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/discovery_cache/__init__.py
@@ -0,0 +1,78 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Caching utility for the discovery document."""
+
+from __future__ import absolute_import
+
+import logging
+import os
+
+LOGGER = logging.getLogger(__name__)
+
+DISCOVERY_DOC_MAX_AGE = 60 * 60 * 24 # 1 day
+DISCOVERY_DOC_DIR = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)), "documents"
+)
+
+
+def autodetect():
+ """Detects an appropriate cache module and returns it.
+
+ Returns:
+ googleapiclient.discovery_cache.base.Cache, a cache object which
+ is auto detected, or None if no cache object is available.
+ """
+ if "GAE_ENV" in os.environ:
+ try:
+ from . import appengine_memcache
+
+ return appengine_memcache.cache
+ except Exception:
+ pass
+ try:
+ from . import file_cache
+
+ return file_cache.cache
+ except Exception:
+ LOGGER.info(
+ "file_cache is only supported with oauth2client<4.0.0", exc_info=False
+ )
+ return None
+
+
+def get_static_doc(serviceName, version):
+ """Retrieves the discovery document from the directory defined in
+ DISCOVERY_DOC_DIR corresponding to the serviceName and version provided.
+
+ Args:
+ serviceName: string, name of the service.
+ version: string, the version of the service.
+
+ Returns:
+ A string containing the contents of the JSON discovery document,
+ otherwise None if the JSON discovery document was not found.
+ """
+
+ content = None
+ doc_name = "{}.{}.json".format(serviceName, version)
+
+ try:
+ with open(os.path.join(DISCOVERY_DOC_DIR, doc_name), "r") as f:
+ content = f.read()
+ except FileNotFoundError:
+ # File does not exist. Nothing to do here.
+ pass
+
+ return content
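+
+
+# Sketch of intended use (service name and version are illustrative; json
+# import assumed):
+#   content = get_static_doc("drive", "v3")
+#   if content is not None:
+#       discovery = json.loads(content)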
diff --git a/Lib/site-packages/googleapiclient/discovery_cache/appengine_memcache.py b/Lib/site-packages/googleapiclient/discovery_cache/appengine_memcache.py
new file mode 100644
index 0000000..73a1dec
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/discovery_cache/appengine_memcache.py
@@ -0,0 +1,55 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""App Engine memcache based cache for the discovery document."""
+
+import logging
+
+# This is only an optional dependency because we only import this
+# module when google.appengine.api.memcache is available.
+from google.appengine.api import memcache
+
+from . import base
+from ..discovery_cache import DISCOVERY_DOC_MAX_AGE
+
+LOGGER = logging.getLogger(__name__)
+
+NAMESPACE = "google-api-client"
+
+
+class Cache(base.Cache):
+ """A cache with app engine memcache API."""
+
+ def __init__(self, max_age):
+ """Constructor.
+
+ Args:
+ max_age: Cache expiration in seconds.
+ """
+ self._max_age = max_age
+
+ def get(self, url):
+ try:
+ return memcache.get(url, namespace=NAMESPACE)
+ except Exception as e:
+ LOGGER.warning(e, exc_info=True)
+
+ def set(self, url, content):
+ try:
+ memcache.set(url, content, time=int(self._max_age), namespace=NAMESPACE)
+ except Exception as e:
+ LOGGER.warning(e, exc_info=True)
+
+
+cache = Cache(max_age=DISCOVERY_DOC_MAX_AGE)
diff --git a/Lib/site-packages/googleapiclient/discovery_cache/base.py b/Lib/site-packages/googleapiclient/discovery_cache/base.py
new file mode 100644
index 0000000..41f3f3f
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/discovery_cache/base.py
@@ -0,0 +1,46 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""An abstract class for caching the discovery document."""
+
+import abc
+
+
+class Cache(object):
+ """A base abstract cache class."""
+
+ __metaclass__ = abc.ABCMeta
+
+ @abc.abstractmethod
+ def get(self, url):
+ """Gets the content from the memcache with a given key.
+
+ Args:
+ url: string, the key for the cache.
+
+ Returns:
+ object, the value in the cache for the given key, or None if the key is
+ not in the cache.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def set(self, url, content):
+ """Sets the given key and content in the cache.
+
+ Args:
+ url: string, the key for the cache.
+ content: string, the discovery document.
+ """
+ raise NotImplementedError()
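+
+
+# A minimal in-memory subclass, as an editor's sketch of the contract
+# (not part of the library):
+#   class DictCache(Cache):
+#       def __init__(self):
+#           self._store = {}
+#
+#       def get(self, url):
+#           return self._store.get(url)
+#
+#       def set(self, url, content):
+#           self._store[url] = content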
diff --git a/Lib/site-packages/googleapiclient/discovery_cache/file_cache.py b/Lib/site-packages/googleapiclient/discovery_cache/file_cache.py
new file mode 100644
index 0000000..28a7219
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/discovery_cache/file_cache.py
@@ -0,0 +1,145 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""File based cache for the discovery document.
+
+The cache is stored in a single file so that multiple processes can
+share the same cache. It locks the file whenever accessing the file.
+When the cache content is corrupted, it will be reinitialized with an
+empty cache.
+"""
+
+from __future__ import division
+
+import datetime
+import json
+import logging
+import os
+import tempfile
+
+try:
+ from oauth2client.contrib.locked_file import LockedFile
+except ImportError:
+ # oauth2client < 2.0.0
+ try:
+ from oauth2client.locked_file import LockedFile
+ except ImportError:
+ # oauth2client > 4.0.0 or google-auth
+ raise ImportError(
+ "file_cache is unavailable when using oauth2client >= 4.0.0 or google-auth"
+ )
+
+from . import base
+from ..discovery_cache import DISCOVERY_DOC_MAX_AGE
+
+LOGGER = logging.getLogger(__name__)
+
+FILENAME = "google-api-python-client-discovery-doc.cache"
+EPOCH = datetime.datetime(1970, 1, 1)
+
+
+def _to_timestamp(date):
+ try:
+ return (date - EPOCH).total_seconds()
+ except AttributeError:
+ # The following is the equivalent of total_seconds() in Python2.6.
+ # See also: https://docs.python.org/2/library/datetime.html
+ delta = date - EPOCH
+ return (
+ delta.microseconds + (delta.seconds + delta.days * 24 * 3600) * 10**6
+ ) / 10**6
+
+
+def _read_or_initialize_cache(f):
+ f.file_handle().seek(0)
+ try:
+ cache = json.load(f.file_handle())
+ except Exception:
+        # Either the file is being opened for the first time or the cache is
+        # corrupted, so initialize the file with an empty dict.
+ cache = {}
+ f.file_handle().truncate(0)
+ f.file_handle().seek(0)
+ json.dump(cache, f.file_handle())
+ return cache
+
+
+class Cache(base.Cache):
+ """A file based cache for the discovery documents."""
+
+ def __init__(self, max_age):
+ """Constructor.
+
+ Args:
+ max_age: Cache expiration in seconds.
+ """
+ self._max_age = max_age
+ self._file = os.path.join(tempfile.gettempdir(), FILENAME)
+ f = LockedFile(self._file, "a+", "r")
+ try:
+ f.open_and_lock()
+ if f.is_locked():
+ _read_or_initialize_cache(f)
+ # If we can not obtain the lock, other process or thread must
+ # have initialized the file.
+ except Exception as e:
+ LOGGER.warning(e, exc_info=True)
+ finally:
+ f.unlock_and_close()
+
+ def get(self, url):
+ f = LockedFile(self._file, "r+", "r")
+ try:
+ f.open_and_lock()
+ if f.is_locked():
+ cache = _read_or_initialize_cache(f)
+ if url in cache:
+ content, t = cache.get(url, (None, 0))
+ if _to_timestamp(datetime.datetime.now()) < t + self._max_age:
+ return content
+ return None
+ else:
+ LOGGER.debug("Could not obtain a lock for the cache file.")
+ return None
+ except Exception as e:
+ LOGGER.warning(e, exc_info=True)
+ finally:
+ f.unlock_and_close()
+
+ def set(self, url, content):
+ f = LockedFile(self._file, "r+", "r")
+ try:
+ f.open_and_lock()
+ if f.is_locked():
+ cache = _read_or_initialize_cache(f)
+ cache[url] = (content, _to_timestamp(datetime.datetime.now()))
+ # Remove stale cache.
+ for k, (_, timestamp) in list(cache.items()):
+ if (
+ _to_timestamp(datetime.datetime.now())
+ >= timestamp + self._max_age
+ ):
+ del cache[k]
+ f.file_handle().truncate(0)
+ f.file_handle().seek(0)
+ json.dump(cache, f.file_handle())
+ else:
+ LOGGER.debug("Could not obtain a lock for the cache file.")
+ except Exception as e:
+ LOGGER.warning(e, exc_info=True)
+ finally:
+ f.unlock_and_close()
+
+
+cache = Cache(max_age=DISCOVERY_DOC_MAX_AGE)
diff --git a/Lib/site-packages/googleapiclient/errors.py b/Lib/site-packages/googleapiclient/errors.py
new file mode 100644
index 0000000..288594a
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/errors.py
@@ -0,0 +1,197 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Errors for the library.
+
+All exceptions defined by the library
+should be defined in this file.
+"""
+from __future__ import absolute_import
+
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+
+import json
+
+from googleapiclient import _helpers as util
+
+
+class Error(Exception):
+ """Base error for this module."""
+
+ pass
+
+
+class HttpError(Error):
+ """HTTP data was invalid or unexpected."""
+
+ @util.positional(3)
+ def __init__(self, resp, content, uri=None):
+ self.resp = resp
+ if not isinstance(content, bytes):
+ raise TypeError("HTTP content should be bytes")
+ self.content = content
+ self.uri = uri
+ self.error_details = ""
+ self.reason = self._get_reason()
+
+ @property
+ def status_code(self):
+ """Return the HTTP status code from the response content."""
+ return self.resp.status
+
+ def _get_reason(self):
+ """Calculate the reason for the error from the response content."""
+ reason = self.resp.reason
+ try:
+ try:
+ data = json.loads(self.content.decode("utf-8"))
+ except json.JSONDecodeError:
+ # In case it is not json
+ data = self.content.decode("utf-8")
+ if isinstance(data, dict):
+ reason = data["error"]["message"]
+ error_detail_keyword = next(
+ (
+ kw
+ for kw in ["detail", "details", "errors", "message"]
+ if kw in data["error"]
+ ),
+ "",
+ )
+ if error_detail_keyword:
+ self.error_details = data["error"][error_detail_keyword]
+ elif isinstance(data, list) and len(data) > 0:
+ first_error = data[0]
+ reason = first_error["error"]["message"]
+ if "details" in first_error["error"]:
+ self.error_details = first_error["error"]["details"]
+ else:
+ self.error_details = data
+ except (ValueError, KeyError, TypeError):
+ pass
+ if reason is None:
+ reason = ""
+ return reason.strip()
+
+ def __repr__(self):
+ if self.error_details:
+            return '<HttpError %s when requesting %s returned "%s". Details: "%s">' % (
+ self.resp.status,
+ self.uri,
+ self.reason,
+ self.error_details,
+ )
+ elif self.uri:
+            return '<HttpError %s when requesting %s returned "%s">' % (
+ self.resp.status,
+ self.uri,
+ self.reason,
+ )
+ else:
+            return '<HttpError %s "%s">' % (self.resp.status, self.reason)
+
+ __str__ = __repr__
+
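+# Typical handling in client code (sketch; "request" is an HttpRequest from a
+# built service object):
+#   try:
+#       response = request.execute()
+#   except HttpError as e:
+#       print(e.status_code, e.reason, e.error_details)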
+
+class InvalidJsonError(Error):
+ """The JSON returned could not be parsed."""
+
+ pass
+
+
+class UnknownFileType(Error):
+ """File type unknown or unexpected."""
+
+ pass
+
+
+class UnknownLinkType(Error):
+ """Link type unknown or unexpected."""
+
+ pass
+
+
+class UnknownApiNameOrVersion(Error):
+ """No API with that name and version exists."""
+
+ pass
+
+
+class UnacceptableMimeTypeError(Error):
+ """That is an unacceptable mimetype for this operation."""
+
+ pass
+
+
+class MediaUploadSizeError(Error):
+ """Media is larger than the method can accept."""
+
+ pass
+
+
+class ResumableUploadError(HttpError):
+ """Error occurred during resumable upload."""
+
+ pass
+
+
+class InvalidChunkSizeError(Error):
+ """The given chunksize is not valid."""
+
+ pass
+
+
+class InvalidNotificationError(Error):
+ """The channel Notification is invalid."""
+
+ pass
+
+
+class BatchError(HttpError):
+ """Error occurred during batch operations."""
+
+ @util.positional(2)
+ def __init__(self, reason, resp=None, content=None):
+ self.resp = resp
+ self.content = content
+ self.reason = reason
+
+ def __repr__(self):
+ if getattr(self.resp, "status", None) is None:
+            return '<BatchError "%s">' % (self.reason)
+ else:
+            return '<BatchError <HttpError %s "%s">>' % (self.resp.status, self.reason)
+
+ __str__ = __repr__
+
+
+class UnexpectedMethodError(Error):
+ """Exception raised by RequestMockBuilder on unexpected calls."""
+
+ @util.positional(1)
+ def __init__(self, methodId=None):
+ """Constructor for an UnexpectedMethodError."""
+ super(UnexpectedMethodError, self).__init__(
+ "Received unexpected call %s" % methodId
+ )
+
+
+class UnexpectedBodyError(Error):
+ """Exception raised by RequestMockBuilder on unexpected bodies."""
+
+ def __init__(self, expected, provided):
+ """Constructor for an UnexpectedMethodError."""
+ super(UnexpectedBodyError, self).__init__(
+ "Expected: [%s] - Provided: [%s]" % (expected, provided)
+ )
diff --git a/Lib/site-packages/googleapiclient/http.py b/Lib/site-packages/googleapiclient/http.py
new file mode 100644
index 0000000..187f6f5
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/http.py
@@ -0,0 +1,1962 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes to encapsulate a single HTTP request.
+
+The classes implement a command pattern, with every
+object supporting an execute() method that does the
+actual HTTP request.
+"""
+from __future__ import absolute_import
+
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+
+import copy
+import http.client as http_client
+import io
+import json
+import logging
+import mimetypes
+import os
+import random
+import socket
+import time
+import urllib
+import uuid
+
+import httplib2
+
+# TODO(issue 221): Remove this conditional import.
+try:
+ import ssl
+except ImportError:
+ _ssl_SSLError = object()
+else:
+ _ssl_SSLError = ssl.SSLError
+
+from email.generator import Generator
+from email.mime.multipart import MIMEMultipart
+from email.mime.nonmultipart import MIMENonMultipart
+from email.parser import FeedParser
+
+from googleapiclient import _auth
+from googleapiclient import _helpers as util
+from googleapiclient.errors import (
+ BatchError,
+ HttpError,
+ InvalidChunkSizeError,
+ ResumableUploadError,
+ UnexpectedBodyError,
+ UnexpectedMethodError,
+)
+from googleapiclient.model import JsonModel
+
+LOGGER = logging.getLogger(__name__)
+
+DEFAULT_CHUNK_SIZE = 100 * 1024 * 1024
+
+MAX_URI_LENGTH = 2048
+
+MAX_BATCH_LIMIT = 1000
+
+_TOO_MANY_REQUESTS = 429
+
+DEFAULT_HTTP_TIMEOUT_SEC = 60
+
+_LEGACY_BATCH_URI = "https://www.googleapis.com/batch"
+
+
+def _should_retry_response(resp_status, content):
+ """Determines whether a response should be retried.
+
+ Args:
+ resp_status: The response status received.
+ content: The response content body.
+
+ Returns:
+ True if the response should be retried, otherwise False.
+ """
+ reason = None
+
+ # Retry on 5xx errors.
+ if resp_status >= 500:
+ return True
+
+ # Retry on 429 errors.
+ if resp_status == _TOO_MANY_REQUESTS:
+ return True
+
+ # For 403 errors, we have to check for the `reason` in the response to
+ # determine if we should retry.
+ if resp_status == http_client.FORBIDDEN:
+ # If there's no details about the 403 type, don't retry.
+ if not content:
+ return False
+
+ # Content is in JSON format.
+ try:
+ data = json.loads(content.decode("utf-8"))
+ if isinstance(data, dict):
+ # There are many variations of the error json so we need
+ # to determine the keyword which has the error detail. Make sure
+ # that the order of the keywords below isn't changed as it can
+ # break user code. If the "errors" key exists, we must use that
+ # first.
+ # See Issue #1243
+ # https://github.com/googleapis/google-api-python-client/issues/1243
+ error_detail_keyword = next(
+ (
+ kw
+ for kw in ["errors", "status", "message"]
+ if kw in data["error"]
+ ),
+ "",
+ )
+
+ if error_detail_keyword:
+ reason = data["error"][error_detail_keyword]
+
+ if isinstance(reason, list) and len(reason) > 0:
+ reason = reason[0]
+ if "reason" in reason:
+ reason = reason["reason"]
+ else:
+ reason = data[0]["error"]["errors"]["reason"]
+ except (UnicodeDecodeError, ValueError, KeyError):
+ LOGGER.warning("Invalid JSON content from response: %s", content)
+ return False
+
+ LOGGER.warning('Encountered 403 Forbidden with reason "%s"', reason)
+
+ # Only retry on rate limit related failures.
+ if reason in ("userRateLimitExceeded", "rateLimitExceeded"):
+ return True
+
+ # Everything else is a success or non-retriable so break.
+ return False
+
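+# Behaviour sketch:
+#   _should_retry_response(503, b"") -> True   (any 5xx)
+#   _should_retry_response(429, b"") -> True   (too many requests)
+#   _should_retry_response(404, b"") -> False
+# A 403 is retried only when the JSON error body's reason is
+# "userRateLimitExceeded" or "rateLimitExceeded".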
+
+def _retry_request(
+ http, num_retries, req_type, sleep, rand, uri, method, *args, **kwargs
+):
+ """Retries an HTTP request multiple times while handling errors.
+
+    If the request still fails after all retries, the last error is either
+    returned as the return value (for HTTP 5xx errors) or raised (for
+    ssl.SSLError).
+
+ Args:
+ http: Http object to be used to execute request.
+ num_retries: Maximum number of retries.
+ req_type: Type of the request (used for logging retries).
+ sleep, rand: Functions to sleep for random time between retries.
+ uri: URI to be requested.
+ method: HTTP method to be used.
+ args, kwargs: Additional arguments passed to http.request.
+
+ Returns:
+ resp, content - Response from the http request (may be HTTP 5xx).
+ """
+ resp = None
+ content = None
+ exception = None
+ for retry_num in range(num_retries + 1):
+ if retry_num > 0:
+ # Sleep before retrying.
+ sleep_time = rand() * 2**retry_num
+ LOGGER.warning(
+ "Sleeping %.2f seconds before retry %d of %d for %s: %s %s, after %s",
+ sleep_time,
+ retry_num,
+ num_retries,
+ req_type,
+ method,
+ uri,
+ resp.status if resp else exception,
+ )
+ sleep(sleep_time)
+
+ try:
+ exception = None
+ resp, content = http.request(uri, method, *args, **kwargs)
+ # Retry on SSL errors and socket timeout errors.
+ except _ssl_SSLError as ssl_error:
+ exception = ssl_error
+ except socket.timeout as socket_timeout:
+ # Needs to be before socket.error as it's a subclass of OSError
+ # socket.timeout has no errorcode
+ exception = socket_timeout
+ except ConnectionError as connection_error:
+ # Needs to be before socket.error as it's a subclass of OSError
+ exception = connection_error
+ except OSError as socket_error:
+ # errno's contents differ by platform, so we have to match by name.
+ # Some of these same errors may have been caught above, e.g. ECONNRESET *should* be
+ # raised as a ConnectionError, but some libraries will raise it as a socket.error
+ # with an errno corresponding to ECONNRESET
+ if socket.errno.errorcode.get(socket_error.errno) not in {
+ "WSAETIMEDOUT",
+ "ETIMEDOUT",
+ "EPIPE",
+ "ECONNABORTED",
+ "ECONNREFUSED",
+ "ECONNRESET",
+ }:
+ raise
+ exception = socket_error
+ except httplib2.ServerNotFoundError as server_not_found_error:
+ exception = server_not_found_error
+
+ if exception:
+ if retry_num == num_retries:
+ raise exception
+ else:
+ continue
+
+ if not _should_retry_response(resp.status, content):
+ break
+
+ return resp, content
+
+
+class MediaUploadProgress(object):
+ """Status of a resumable upload."""
+
+ def __init__(self, resumable_progress, total_size):
+ """Constructor.
+
+ Args:
+ resumable_progress: int, bytes sent so far.
+ total_size: int, total bytes in complete upload, or None if the total
+ upload size isn't known ahead of time.
+ """
+ self.resumable_progress = resumable_progress
+ self.total_size = total_size
+
+ def progress(self):
+ """Percent of upload completed, as a float.
+
+ Returns:
+ the percentage complete as a float, returning 0.0 if the total size of
+ the upload is unknown.
+ """
+ if self.total_size is not None and self.total_size != 0:
+ return float(self.resumable_progress) / float(self.total_size)
+ else:
+ return 0.0
+
+
+class MediaDownloadProgress(object):
+ """Status of a resumable download."""
+
+ def __init__(self, resumable_progress, total_size):
+ """Constructor.
+
+ Args:
+ resumable_progress: int, bytes received so far.
+ total_size: int, total bytes in complete download.
+ """
+ self.resumable_progress = resumable_progress
+ self.total_size = total_size
+
+ def progress(self):
+ """Percent of download completed, as a float.
+
+ Returns:
+ the percentage complete as a float, returning 0.0 if the total size of
+ the download is unknown.
+ """
+ if self.total_size is not None and self.total_size != 0:
+ return float(self.resumable_progress) / float(self.total_size)
+ else:
+ return 0.0
+
+
+class MediaUpload(object):
+ """Describes a media object to upload.
+
+ Base class that defines the interface of MediaUpload subclasses.
+
+ Note that subclasses of MediaUpload may allow you to control the chunksize
+ when uploading a media object. It is important to keep the size of the chunk
+ as large as possible to keep the upload efficient. Other factors may influence
+ the size of the chunk you use, particularly if you are working in an
+ environment where individual HTTP requests may have a hardcoded time limit,
+ such as under certain classes of requests under Google App Engine.
+
+ Streams are io.Base compatible objects that support seek(). Some MediaUpload
+ subclasses support using streams directly to upload data. Support for
+ streaming may be indicated by a MediaUpload sub-class and if appropriate for a
+ platform that stream will be used for uploading the media object. The support
+ for streaming is indicated by has_stream() returning True. The stream() method
+ should return an io.Base object that supports seek(). On platforms where the
+ underlying httplib module supports streaming, for example Python 2.6 and
+ later, the stream will be passed into the http library which will result in
+ less memory being used and possibly faster uploads.
+
+ If you need to upload media that can't be uploaded using any of the existing
+ MediaUpload sub-class then you can sub-class MediaUpload for your particular
+ needs.
+ """
+
+ def chunksize(self):
+ """Chunk size for resumable uploads.
+
+ Returns:
+ Chunk size in bytes.
+ """
+ raise NotImplementedError()
+
+ def mimetype(self):
+ """Mime type of the body.
+
+ Returns:
+ Mime type.
+ """
+ return "application/octet-stream"
+
+ def size(self):
+ """Size of upload.
+
+ Returns:
+            Size of the body, or None if the size is unknown.
+ """
+ return None
+
+ def resumable(self):
+ """Whether this upload is resumable.
+
+ Returns:
+            True if this is a resumable upload, False otherwise.
+ """
+ return False
+
+    def getbytes(self, begin, length):
+ """Get bytes from the media.
+
+ Args:
+ begin: int, offset from beginning of file.
+ length: int, number of bytes to read, starting at begin.
+
+ Returns:
+ A string of bytes read. May be shorter than length if EOF was reached
+ first.
+ """
+ raise NotImplementedError()
+
+ def has_stream(self):
+ """Does the underlying upload support a streaming interface.
+
+ Streaming means it is an io.IOBase subclass that supports seek, i.e.
+ seekable() returns True.
+
+ Returns:
+ True if the call to stream() will return an instance of a seekable io.Base
+ subclass.
+ """
+ return False
+
+ def stream(self):
+ """A stream interface to the data being uploaded.
+
+ Returns:
+ The returned value is an io.IOBase subclass that supports seek, i.e.
+ seekable() returns True.
+ """
+ raise NotImplementedError()
+
+ @util.positional(1)
+ def _to_json(self, strip=None):
+ """Utility function for creating a JSON representation of a MediaUpload.
+
+ Args:
+ strip: array, An array of names of members to not include in the JSON.
+
+ Returns:
+ string, a JSON representation of this instance, suitable to pass to
+ from_json().
+ """
+ t = type(self)
+ d = copy.copy(self.__dict__)
+ if strip is not None:
+ for member in strip:
+ del d[member]
+ d["_class"] = t.__name__
+ d["_module"] = t.__module__
+ return json.dumps(d)
+
+ def to_json(self):
+ """Create a JSON representation of an instance of MediaUpload.
+
+ Returns:
+ string, a JSON representation of this instance, suitable to pass to
+ from_json().
+ """
+ return self._to_json()
+
+ @classmethod
+ def new_from_json(cls, s):
+ """Utility class method to instantiate a MediaUpload subclass from a JSON
+ representation produced by to_json().
+
+ Args:
+ s: string, JSON from to_json().
+
+ Returns:
+ An instance of the subclass of MediaUpload that was serialized with
+ to_json().
+ """
+ data = json.loads(s)
+ # Find and call the right classmethod from_json() to restore the object.
+ module = data["_module"]
+ m = __import__(module, fromlist=module.split(".")[:-1])
+ kls = getattr(m, data["_class"])
+ from_json = getattr(kls, "from_json")
+ return from_json(s)
+
+
+class MediaIoBaseUpload(MediaUpload):
+ """A MediaUpload for a io.Base objects.
+
+ Note that the Python file object is compatible with io.Base and can be used
+ with this class also.
+
+    fh = BytesIO(b'...Some data to upload...')
+ media = MediaIoBaseUpload(fh, mimetype='image/png',
+ chunksize=1024*1024, resumable=True)
+ farm.animals().insert(
+ id='cow',
+ name='cow.png',
+ media_body=media).execute()
+
+ Depending on the platform you are working on, you may pass -1 as the
+ chunksize, which indicates that the entire file should be uploaded in a single
+ request. If the underlying platform supports streams, such as Python 2.6 or
+ later, then this can be very efficient as it avoids multiple connections, and
+ also avoids loading the entire file into memory before sending it. Note that
+ Google App Engine has a 5MB limit on request size, so you should never set
+ your chunksize larger than 5MB, or to -1.
+ """
+
+ @util.positional(3)
+ def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
+ """Constructor.
+
+ Args:
+ fd: io.Base or file object, The source of the bytes to upload. MUST be
+ opened in blocking mode, do not use streams opened in non-blocking mode.
+ The given stream must be seekable, that is, it must be able to call
+ seek() on fd.
+ mimetype: string, Mime-type of the file.
+ chunksize: int, File will be uploaded in chunks of this many bytes. Only
+ used if resumable=True. Pass in a value of -1 if the file is to be
+ uploaded as a single chunk. Note that Google App Engine has a 5MB limit
+ on request size, so you should never set your chunksize larger than 5MB,
+ or to -1.
+ resumable: bool, True if this is a resumable upload. False means upload
+ in a single request.
+ """
+ super(MediaIoBaseUpload, self).__init__()
+ self._fd = fd
+ self._mimetype = mimetype
+ if not (chunksize == -1 or chunksize > 0):
+ raise InvalidChunkSizeError()
+ self._chunksize = chunksize
+ self._resumable = resumable
+
+ self._fd.seek(0, os.SEEK_END)
+ self._size = self._fd.tell()
+
+ def chunksize(self):
+ """Chunk size for resumable uploads.
+
+ Returns:
+ Chunk size in bytes.
+ """
+ return self._chunksize
+
+ def mimetype(self):
+ """Mime type of the body.
+
+ Returns:
+ Mime type.
+ """
+ return self._mimetype
+
+ def size(self):
+ """Size of upload.
+
+ Returns:
+ Size of the body, or None if the size is unknown.
+ """
+ return self._size
+
+ def resumable(self):
+ """Whether this upload is resumable.
+
+ Returns:
+ True if resumable upload or False.
+ """
+ return self._resumable
+
+ def getbytes(self, begin, length):
+ """Get bytes from the media.
+
+ Args:
+ begin: int, offset from beginning of file.
+ length: int, number of bytes to read, starting at begin.
+
+ Returns:
+ A string of bytes read. May be shorter than length if EOF was reached
+ first.
+ """
+ self._fd.seek(begin)
+ return self._fd.read(length)
+
+ def has_stream(self):
+ """Does the underlying upload support a streaming interface.
+
+ Streaming means it is an io.IOBase subclass that supports seek, i.e.
+ seekable() returns True.
+
+ Returns:
+ True if the call to stream() will return an instance of a seekable io.IOBase
+ subclass.
+ """
+ return True
+
+ def stream(self):
+ """A stream interface to the data being uploaded.
+
+ Returns:
+ The returned value is an io.IOBase subclass that supports seek, i.e.
+ seekable() returns True.
+ """
+ return self._fd
+
+ def to_json(self):
+ """This upload type is not serializable."""
+ raise NotImplementedError("MediaIoBaseUpload is not serializable.")
+
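+
+# Editor's sketch (not part of the upstream source): constructing a
+# resumable MediaIoBaseUpload from an in-memory byte stream. The size is
+# discovered by seeking to the end of the stream, and stream() hands the
+# same file handle back for chunked uploading.
+def _example_media_io_base_upload():
+    import io
+    payload = b"...some data to upload..."
+    fh = io.BytesIO(payload)
+    media = MediaIoBaseUpload(fh, mimetype="image/png",
+                              chunksize=1024 * 1024, resumable=True)
+    assert media.size() == len(payload)
+    assert media.has_stream() and media.stream() is fh
+    return media
+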
+
+class MediaFileUpload(MediaIoBaseUpload):
+ """A MediaUpload for a file.
+
+ Construct a MediaFileUpload and pass as the media_body parameter of the
+ method. For example, if we had a service that allowed uploading images:
+
+ media = MediaFileUpload('cow.png', mimetype='image/png',
+ chunksize=1024*1024, resumable=True)
+ farm.animals().insert(
+ id='cow',
+ name='cow.png',
+ media_body=media).execute()
+
+ Depending on the platform you are working on, you may pass -1 as the
+ chunksize, which indicates that the entire file should be uploaded in a single
+ request. If the underlying platform supports streams, such as Python 2.6 or
+ later, then this can be very efficient as it avoids multiple connections, and
+ also avoids loading the entire file into memory before sending it. Note that
+ Google App Engine has a 5MB limit on request size, so you should never set
+ your chunksize larger than 5MB, or to -1.
+ """
+
+ @util.positional(2)
+ def __init__(
+ self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE, resumable=False
+ ):
+ """Constructor.
+
+ Args:
+ filename: string, Name of the file.
+ mimetype: string, Mime-type of the file. If None then a mime-type will be
+ guessed from the file extension.
+ chunksize: int, File will be uploaded in chunks of this many bytes. Only
+ used if resumable=True. Pass in a value of -1 if the file is to be
+ uploaded in a single chunk. Note that Google App Engine has a 5MB limit
+ on request size, so you should never set your chunksize larger than 5MB,
+ or to -1.
+ resumable: bool, True if this is a resumable upload. False means upload
+ in a single request.
+ """
+ self._fd = None
+ self._filename = filename
+ self._fd = open(self._filename, "rb")
+ if mimetype is None:
+ # No mimetype provided, make a guess.
+ mimetype, _ = mimetypes.guess_type(filename)
+ if mimetype is None:
+ # Guess failed, use octet-stream.
+ mimetype = "application/octet-stream"
+ super(MediaFileUpload, self).__init__(
+ self._fd, mimetype, chunksize=chunksize, resumable=resumable
+ )
+
+ def __del__(self):
+ if self._fd:
+ self._fd.close()
+
+ def to_json(self):
+ """Creating a JSON representation of an instance of MediaFileUpload.
+
+ Returns:
+ string, a JSON representation of this instance, suitable to pass to
+ from_json().
+ """
+ return self._to_json(strip=["_fd"])
+
+ @staticmethod
+ def from_json(s):
+ d = json.loads(s)
+ return MediaFileUpload(
+ d["_filename"],
+ mimetype=d["_mimetype"],
+ chunksize=d["_chunksize"],
+ resumable=d["_resumable"],
+ )
+
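+
+# Editor's sketch: MediaFileUpload survives a to_json()/new_from_json()
+# round trip because the open file handle is stripped and only the
+# constructor arguments are kept. 'cow.png' is a placeholder path that
+# must exist on disk for the sketch to run.
+def _example_media_file_upload_roundtrip():
+    media = MediaFileUpload("cow.png", mimetype="image/png",
+                            chunksize=1024 * 1024, resumable=True)
+    restored = MediaUpload.new_from_json(media.to_json())
+    assert isinstance(restored, MediaFileUpload)
+    assert restored.mimetype() == "image/png"
+    return restored
+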
+
+class MediaInMemoryUpload(MediaIoBaseUpload):
+ """MediaUpload for a chunk of bytes.
+
+ DEPRECATED: Use MediaIoBaseUpload with io.BytesIO for the stream.
+ """
+
+ @util.positional(2)
+ def __init__(
+ self,
+ body,
+ mimetype="application/octet-stream",
+ chunksize=DEFAULT_CHUNK_SIZE,
+ resumable=False,
+ ):
+ """Create a new MediaInMemoryUpload.
+
+ DEPRECATED: Use MediaIoBaseUpload with io.BytesIO for the stream.
+
+ Args:
+ body: string, Bytes of body content.
+ mimetype: string, Mime-type of the file or default of
+ 'application/octet-stream'.
+ chunksize: int, File will be uploaded in chunks of this many bytes. Only
+ used if resumable=True.
+ resumable: bool, True if this is a resumable upload. False means upload
+ in a single request.
+ """
+ fd = io.BytesIO(body)
+ super(MediaInMemoryUpload, self).__init__(
+ fd, mimetype, chunksize=chunksize, resumable=resumable
+ )
+
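+
+# Editor's sketch: migrating off the deprecated class. Wrapping the bytes
+# in io.BytesIO and using MediaIoBaseUpload directly is equivalent, since
+# that is exactly what MediaInMemoryUpload does internally.
+def _example_in_memory_migration():
+    import io
+    old = MediaInMemoryUpload(b"payload", mimetype="text/plain")
+    new = MediaIoBaseUpload(io.BytesIO(b"payload"), mimetype="text/plain")
+    assert old.size() == new.size() == 7
+    return new
+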
+
+class MediaIoBaseDownload(object):
+ """ "Download media resources.
+
+ Note that the Python file object is compatible with io.Base and can be used
+ with this class also.
+
+
+ Example:
+ request = farms.animals().get_media(id='cow')
+ fh = io.FileIO('cow.png', mode='wb')
+ downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024)
+
+ done = False
+ while done is False:
+ status, done = downloader.next_chunk()
+ if status:
+ print "Download %d%%." % int(status.progress() * 100)
+ print "Download Complete!"
+ """
+
+ @util.positional(3)
+ def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
+ """Constructor.
+
+ Args:
+ fd: io.Base or file object, The stream in which to write the downloaded
+ bytes.
+ request: googleapiclient.http.HttpRequest, the media request to perform in
+ chunks.
+ chunksize: int, File will be downloaded in chunks of this many bytes.
+ """
+ self._fd = fd
+ self._request = request
+ self._uri = request.uri
+ self._chunksize = chunksize
+ self._progress = 0
+ self._total_size = None
+ self._done = False
+
+ # Stubs for testing.
+ self._sleep = time.sleep
+ self._rand = random.random
+
+ self._headers = {}
+ for k, v in request.headers.items():
+ # allow users to supply custom headers by setting them on the request
+ # but strip out the ones that are set by default on requests generated by
+ # API methods like Drive's files().get(fileId=...)
+ if not k.lower() in ("accept", "accept-encoding", "user-agent"):
+ self._headers[k] = v
+
+ @util.positional(1)
+ def next_chunk(self, num_retries=0):
+ """Get the next chunk of the download.
+
+ Args:
+ num_retries: Integer, number of times to retry with randomized
+ exponential backoff. If all retries fail, the raised HttpError
+ represents the last request. If zero (default), we attempt the
+ request only once.
+
+ Returns:
+ (status, done): (MediaDownloadProgress, boolean)
+ The value of 'done' will be True when the media has been fully
+ downloaded or the total size of the media is unknown.
+
+ Raises:
+ googleapiclient.errors.HttpError if the response was not a 2xx.
+ httplib2.HttpLib2Error if a transport error has occurred.
+ """
+ headers = self._headers.copy()
+ headers["range"] = "bytes=%d-%d" % (
+ self._progress,
+ self._progress + self._chunksize - 1,
+ )
+ http = self._request.http
+
+ resp, content = _retry_request(
+ http,
+ num_retries,
+ "media download",
+ self._sleep,
+ self._rand,
+ self._uri,
+ "GET",
+ headers=headers,
+ )
+
+ if resp.status in [200, 206]:
+ if "content-location" in resp and resp["content-location"] != self._uri:
+ self._uri = resp["content-location"]
+ self._progress += len(content)
+ self._fd.write(content)
+
+ if "content-range" in resp:
+ content_range = resp["content-range"]
+ length = content_range.rsplit("/", 1)[1]
+ self._total_size = int(length)
+ elif "content-length" in resp:
+ self._total_size = int(resp["content-length"])
+
+ if self._total_size is None or self._progress == self._total_size:
+ self._done = True
+ return MediaDownloadProgress(self._progress, self._total_size), self._done
+ elif resp.status == 416:
+ # 416 is Range Not Satisfiable
+ # This typically occurs with a zero byte file
+ content_range = resp["content-range"]
+ length = content_range.rsplit("/", 1)[1]
+ self._total_size = int(length)
+ if self._total_size == 0:
+ self._done = True
+ return (
+ MediaDownloadProgress(self._progress, self._total_size),
+ self._done,
+ )
+ raise HttpError(resp, content, uri=self._uri)
+
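+
+# Editor's sketch: driving the chunked download loop against a mocked
+# transport (HttpMockSequence, defined later in this module). Each mocked
+# response advertises its slice via Content-Range, which is how next_chunk()
+# learns the total size and decides when the download is done.
+def _example_media_download():
+    import io
+    http = HttpMockSequence([
+        ({"status": "200", "content-range": "0-2/6"}, b"abc"),
+        ({"status": "200", "content-range": "3-5/6"}, b"def"),
+    ])
+    request = HttpRequest(http, HttpRequest.null_postproc,
+                          "http://example.com/media")
+    fh = io.BytesIO()
+    downloader = MediaIoBaseDownload(fh, request, chunksize=3)
+    done = False
+    while not done:
+        status, done = downloader.next_chunk()
+    assert fh.getvalue() == b"abcdef"
+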
+
+class _StreamSlice(object):
+ """Truncated stream.
+
+ Takes a stream and presents a stream that is a slice of the original stream.
+ This is used when uploading media in chunks. In later versions of Python a
+ stream can be passed to httplib in place of the string of data to send. The
+ problem is that httplib just blindly reads to the end of the stream. This
+ wrapper presents a virtual stream that only reads to the end of the chunk.
+ """
+
+ def __init__(self, stream, begin, chunksize):
+ """Constructor.
+
+ Args:
+ stream: (io.Base, file object), the stream to wrap.
+ begin: int, the seek position the chunk begins at.
+ chunksize: int, the size of the chunk.
+ """
+ self._stream = stream
+ self._begin = begin
+ self._chunksize = chunksize
+ self._stream.seek(begin)
+
+ def read(self, n=-1):
+ """Read n bytes.
+
+ Args:
+ n, int, the number of bytes to read.
+
+ Returns:
+ A string of length 'n', or less if EOF is reached.
+ """
+ # The data left available to read sits in [cur, end)
+ cur = self._stream.tell()
+ end = self._begin + self._chunksize
+ if n == -1 or cur + n > end:
+ n = end - cur
+ return self._stream.read(n)
+
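+
+# Editor's sketch of _StreamSlice semantics: reads are clamped to the end
+# of the chunk, so a consumer that blindly reads to EOF sees one chunk only.
+def _example_stream_slice():
+    import io
+    s = _StreamSlice(io.BytesIO(b"0123456789"), 2, 4)
+    assert s.read() == b"2345"  # read(-1) stops at begin + chunksize
+    assert s.read() == b""      # the slice is exhausted
+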
+
+class HttpRequest(object):
+ """Encapsulates a single HTTP request."""
+
+ @util.positional(4)
+ def __init__(
+ self,
+ http,
+ postproc,
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ methodId=None,
+ resumable=None,
+ ):
+ """Constructor for an HttpRequest.
+
+ Args:
+ http: httplib2.Http, the transport object to use to make a request
+ postproc: callable, called on the HTTP response and content to transform
+ it into a data object before returning, or raising an exception
+ on an error.
+ uri: string, the absolute URI to send the request to
+ method: string, the HTTP method to use
+ body: string, the request body of the HTTP request,
+ headers: dict, the HTTP request headers
+ methodId: string, a unique identifier for the API method being called.
+ resumable: MediaUpload, None if this is not a resumable request.
+ """
+ self.uri = uri
+ self.method = method
+ self.body = body
+ self.headers = headers or {}
+ self.methodId = methodId
+ self.http = http
+ self.postproc = postproc
+ self.resumable = resumable
+ self.response_callbacks = []
+ self._in_error_state = False
+
+ # The size of the non-media part of the request.
+ self.body_size = len(self.body or "")
+
+ # The resumable URI to send chunks to.
+ self.resumable_uri = None
+
+ # The bytes that have been uploaded.
+ self.resumable_progress = 0
+
+ # Stubs for testing.
+ self._rand = random.random
+ self._sleep = time.sleep
+
+ @util.positional(1)
+ def execute(self, http=None, num_retries=0):
+ """Execute the request.
+
+ Args:
+ http: httplib2.Http, an http object to be used in place of the
+ one the HttpRequest request object was constructed with.
+ num_retries: Integer, number of times to retry with randomized
+ exponential backoff. If all retries fail, the raised HttpError
+ represents the last request. If zero (default), we attempt the
+ request only once.
+
+ Returns:
+ A deserialized object model of the response body as determined
+ by the postproc.
+
+ Raises:
+ googleapiclient.errors.HttpError if the response was not a 2xx.
+ httplib2.HttpLib2Error if a transport error has occurred.
+ """
+ if http is None:
+ http = self.http
+
+ if self.resumable:
+ body = None
+ while body is None:
+ _, body = self.next_chunk(http=http, num_retries=num_retries)
+ return body
+
+ # Non-resumable case.
+
+ if "content-length" not in self.headers:
+ self.headers["content-length"] = str(self.body_size)
+ # If the request URI is too long then turn it into a POST request.
+ # Assume that a GET request never contains a request body.
+ if len(self.uri) > MAX_URI_LENGTH and self.method == "GET":
+ self.method = "POST"
+ self.headers["x-http-method-override"] = "GET"
+ self.headers["content-type"] = "application/x-www-form-urlencoded"
+ parsed = urllib.parse.urlparse(self.uri)
+ self.uri = urllib.parse.urlunparse(
+ (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None, None)
+ )
+ self.body = parsed.query
+ self.headers["content-length"] = str(len(self.body))
+
+ # Handle retries for server-side errors.
+ resp, content = _retry_request(
+ http,
+ num_retries,
+ "request",
+ self._sleep,
+ self._rand,
+ str(self.uri),
+ method=str(self.method),
+ body=self.body,
+ headers=self.headers,
+ )
+
+ for callback in self.response_callbacks:
+ callback(resp)
+ if resp.status >= 300:
+ raise HttpError(resp, content, uri=self.uri)
+ return self.postproc(resp, content)
+
+ @util.positional(2)
+ def add_response_callback(self, cb):
+ """add_response_headers_callback
+
+ Args:
+ cb: Callback to be called on receiving the response headers, of signature:
+
+ def cb(resp):
+ # Where resp is an instance of httplib2.Response
+ """
+ self.response_callbacks.append(cb)
+
+ @util.positional(1)
+ def next_chunk(self, http=None, num_retries=0):
+ """Execute the next step of a resumable upload.
+
+ Can only be used if the method being executed supports media uploads and
+ the MediaUpload object passed in was flagged as using resumable upload.
+
+ Example:
+
+ media = MediaFileUpload('cow.png', mimetype='image/png',
+ chunksize=1000, resumable=True)
+ request = farm.animals().insert(
+ id='cow',
+ name='cow.png',
+ media_body=media)
+
+ response = None
+ while response is None:
+ status, response = request.next_chunk()
+ if status:
+ print "Upload %d%% complete." % int(status.progress() * 100)
+
+
+ Args:
+ http: httplib2.Http, an http object to be used in place of the
+ one the HttpRequest request object was constructed with.
+ num_retries: Integer, number of times to retry with randomized
+ exponential backoff. If all retries fail, the raised HttpError
+ represents the last request. If zero (default), we attempt the
+ request only once.
+
+ Returns:
+ (status, body): (ResumableMediaStatus, object)
+ The body will be None until the resumable media is fully uploaded.
+
+ Raises:
+ googleapiclient.errors.HttpError if the response was not a 2xx.
+ httplib2.HttpLib2Error if a transport error has occurred.
+ """
+ if http is None:
+ http = self.http
+
+ if self.resumable.size() is None:
+ size = "*"
+ else:
+ size = str(self.resumable.size())
+
+ if self.resumable_uri is None:
+ start_headers = copy.copy(self.headers)
+ start_headers["X-Upload-Content-Type"] = self.resumable.mimetype()
+ if size != "*":
+ start_headers["X-Upload-Content-Length"] = size
+ start_headers["content-length"] = str(self.body_size)
+
+ resp, content = _retry_request(
+ http,
+ num_retries,
+ "resumable URI request",
+ self._sleep,
+ self._rand,
+ self.uri,
+ method=self.method,
+ body=self.body,
+ headers=start_headers,
+ )
+
+ if resp.status == 200 and "location" in resp:
+ self.resumable_uri = resp["location"]
+ else:
+ raise ResumableUploadError(resp, content)
+ elif self._in_error_state:
+ # If we are in an error state then query the server for current state of
+ # the upload by sending an empty PUT and reading the 'range' header in
+ # the response.
+ headers = {"Content-Range": "bytes */%s" % size, "content-length": "0"}
+ resp, content = http.request(self.resumable_uri, "PUT", headers=headers)
+ status, body = self._process_response(resp, content)
+ if body:
+ # The upload was complete.
+ return (status, body)
+
+ if self.resumable.has_stream():
+ data = self.resumable.stream()
+ if self.resumable.chunksize() == -1:
+ data.seek(self.resumable_progress)
+ chunk_end = self.resumable.size() - self.resumable_progress - 1
+ else:
+ # Doing chunking with a stream, so wrap a slice of the stream.
+ data = _StreamSlice(
+ data, self.resumable_progress, self.resumable.chunksize()
+ )
+ chunk_end = min(
+ self.resumable_progress + self.resumable.chunksize() - 1,
+ self.resumable.size() - 1,
+ )
+ else:
+ data = self.resumable.getbytes(
+ self.resumable_progress, self.resumable.chunksize()
+ )
+
+ # A short read implies that we are at EOF, so finish the upload.
+ if len(data) < self.resumable.chunksize():
+ size = str(self.resumable_progress + len(data))
+
+ chunk_end = self.resumable_progress + len(data) - 1
+
+ headers = {
+ # Must set the content-length header here because httplib can't
+ # calculate the size when working with _StreamSlice.
+ "Content-Length": str(chunk_end - self.resumable_progress + 1),
+ }
+
+ # An empty file results in chunk_end = -1 and size = 0
+ # sending "bytes 0--1/0" results in an invalid request
+ # Only add header "Content-Range" if chunk_end != -1
+ if chunk_end != -1:
+ headers["Content-Range"] = "bytes %d-%d/%s" % (
+ self.resumable_progress,
+ chunk_end,
+ size,
+ )
+
+ for retry_num in range(num_retries + 1):
+ if retry_num > 0:
+ self._sleep(self._rand() * 2**retry_num)
+ LOGGER.warning(
+ "Retry #%d for media upload: %s %s, following status: %d"
+ % (retry_num, self.method, self.uri, resp.status)
+ )
+
+ try:
+ resp, content = http.request(
+ self.resumable_uri, method="PUT", body=data, headers=headers
+ )
+ except:
+ self._in_error_state = True
+ raise
+ if not _should_retry_response(resp.status, content):
+ break
+
+ return self._process_response(resp, content)
+
+ def _process_response(self, resp, content):
+ """Process the response from a single chunk upload.
+
+ Args:
+ resp: httplib2.Response, the response object.
+ content: string, the content of the response.
+
+ Returns:
+ (status, body): (ResumableMediaStatus, object)
+ The body will be None until the resumable media is fully uploaded.
+
+ Raises:
+ googleapiclient.errors.HttpError if the response was not a 2xx or a 308.
+ """
+ if resp.status in [200, 201]:
+ self._in_error_state = False
+ return None, self.postproc(resp, content)
+ elif resp.status == 308:
+ self._in_error_state = False
+ # A "308 Resume Incomplete" indicates we are not done.
+ try:
+ self.resumable_progress = int(resp["range"].split("-")[1]) + 1
+ except KeyError:
+ # If resp doesn't contain range header, resumable progress is 0
+ self.resumable_progress = 0
+ if "location" in resp:
+ self.resumable_uri = resp["location"]
+ else:
+ self._in_error_state = True
+ raise HttpError(resp, content, uri=self.uri)
+
+ return (
+ MediaUploadProgress(self.resumable_progress, self.resumable.size()),
+ None,
+ )
+
+ def to_json(self):
+ """Returns a JSON representation of the HttpRequest."""
+ d = copy.copy(self.__dict__)
+ if d["resumable"] is not None:
+ d["resumable"] = self.resumable.to_json()
+ del d["http"]
+ del d["postproc"]
+ del d["_sleep"]
+ del d["_rand"]
+
+ return json.dumps(d)
+
+ @staticmethod
+ def from_json(s, http, postproc):
+ """Returns an HttpRequest populated with info from a JSON object."""
+ d = json.loads(s)
+ if d["resumable"] is not None:
+ d["resumable"] = MediaUpload.new_from_json(d["resumable"])
+ return HttpRequest(
+ http,
+ postproc,
+ uri=d["uri"],
+ method=d["method"],
+ body=d["body"],
+ headers=d["headers"],
+ methodId=d["methodId"],
+ resumable=d["resumable"],
+ )
+
+ @staticmethod
+ def null_postproc(resp, contents):
+ return resp, contents
+
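+
+# Editor's sketch: executing an HttpRequest against a mocked transport,
+# including one retriable 500 followed by success (num_retries=1). The
+# _sleep stub is overridden so the sketch does not actually back off.
+def _example_http_request_retry():
+    import json as _json
+    http = HttpMockSequence([
+        ({"status": "500"}, b""),
+        ({"status": "200"}, b'{"answer": 42}'),
+    ])
+    request = HttpRequest(http, lambda resp, content: _json.loads(content),
+                          "http://example.com/api")
+    request._sleep = lambda _seconds: None
+    assert request.execute(num_retries=1) == {"answer": 42}
+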
+
+class BatchHttpRequest(object):
+ """Batches multiple HttpRequest objects into a single HTTP request.
+
+ Example:
+ from googleapiclient.http import BatchHttpRequest
+
+ def list_animals(request_id, response, exception):
+ \"\"\"Do something with the animals list response.\"\"\"
+ if exception is not None:
+ # Do something with the exception.
+ pass
+ else:
+ # Do something with the response.
+ pass
+
+ def list_farmers(request_id, response, exception):
+ \"\"\"Do something with the farmers list response.\"\"\"
+ if exception is not None:
+ # Do something with the exception.
+ pass
+ else:
+ # Do something with the response.
+ pass
+
+ service = build('farm', 'v2')
+
+ batch = BatchHttpRequest()
+
+ batch.add(service.animals().list(), list_animals)
+ batch.add(service.farmers().list(), list_farmers)
+ batch.execute(http=http)
+ """
+
+ @util.positional(1)
+ def __init__(self, callback=None, batch_uri=None):
+ """Constructor for a BatchHttpRequest.
+
+ Args:
+ callback: callable, A callback to be called for each response, of the
+ form callback(id, response, exception). The first parameter is the
+ request id, and the second is the deserialized response object. The
+ third is an googleapiclient.errors.HttpError exception object if an HTTP error
+ occurred while processing the request, or None if no error occurred.
+ batch_uri: string, URI to send batch requests to.
+ """
+ if batch_uri is None:
+ batch_uri = _LEGACY_BATCH_URI
+
+ if batch_uri == _LEGACY_BATCH_URI:
+ LOGGER.warning(
+ "You have constructed a BatchHttpRequest using the legacy batch "
+ "endpoint %s. This endpoint will be turned down on August 12, 2020. "
+ "Please provide the API-specific endpoint or use "
+ "service.new_batch_http_request(). For more details see "
+ "https://developers.googleblog.com/2018/03/discontinuing-support-for-json-rpc-and.html"
+ "and https://developers.google.com/api-client-library/python/guide/batch.",
+ _LEGACY_BATCH_URI,
+ )
+ self._batch_uri = batch_uri
+
+ # Global callback to be called for each individual response in the batch.
+ self._callback = callback
+
+ # A map from id to request.
+ self._requests = {}
+
+ # A map from id to callback.
+ self._callbacks = {}
+
+ # List of request ids, in the order in which they were added.
+ self._order = []
+
+ # The last auto generated id.
+ self._last_auto_id = 0
+
+ # Unique ID on which to base the Content-ID headers.
+ self._base_id = None
+
+ # A map from request id to (httplib2.Response, content) response pairs
+ self._responses = {}
+
+ # A map of id(Credentials) that have been refreshed.
+ self._refreshed_credentials = {}
+
+ def _refresh_and_apply_credentials(self, request, http):
+ """Refresh the credentials and apply to the request.
+
+ Args:
+ request: HttpRequest, the request.
+ http: httplib2.Http, the global http object for the batch.
+ """
+ # Refresh the credentials, but only once per distinct credentials object.
+ # If the request has no http object of its own, fall back to the http
+ # object passed in via execute().
+ creds = None
+ request_credentials = False
+
+ if request.http is not None:
+ creds = _auth.get_credentials_from_http(request.http)
+ request_credentials = True
+
+ if creds is None and http is not None:
+ creds = _auth.get_credentials_from_http(http)
+
+ if creds is not None:
+ if id(creds) not in self._refreshed_credentials:
+ _auth.refresh_credentials(creds)
+ self._refreshed_credentials[id(creds)] = 1
+
+ # Only apply the credentials if we are using the http object passed in,
+ # otherwise apply() will get called during _serialize_request().
+ if request.http is None or not request_credentials:
+ _auth.apply_credentials(creds, request.headers)
+
+ def _id_to_header(self, id_):
+ """Convert an id to a Content-ID header value.
+
+ Args:
+ id_: string, identifier of individual request.
+
+ Returns:
+ A Content-ID header with the id_ encoded into it. A UUID is prepended to
+ the value because Content-ID headers are supposed to be universally
+ unique.
+ """
+ if self._base_id is None:
+ self._base_id = uuid.uuid4()
+
+ # NB: we intentionally leave whitespace between base/id and '+', so RFC2822
+ # line folding works properly on Python 3; see
+ # https://github.com/googleapis/google-api-python-client/issues/164
+ return "<%s + %s>" % (self._base_id, urllib.parse.quote(id_))
+
+ def _header_to_id(self, header):
+ """Convert a Content-ID header value to an id.
+
+ Presumes the Content-ID header conforms to the format that _id_to_header()
+ returns.
+
+ Args:
+ header: string, Content-ID header value.
+
+ Returns:
+ The extracted id value.
+
+ Raises:
+ BatchError if the header is not in the expected format.
+ """
+ if header[0] != "<" or header[-1] != ">":
+ raise BatchError("Invalid value for Content-ID: %s" % header)
+ if "+" not in header:
+ raise BatchError("Invalid value for Content-ID: %s" % header)
+ base, id_ = header[1:-1].split(" + ", 1)
+
+ return urllib.parse.unquote(id_)
+
+ def _serialize_request(self, request):
+ """Convert an HttpRequest object into a string.
+
+ Args:
+ request: HttpRequest, the request to serialize.
+
+ Returns:
+ The request as a string in application/http format.
+ """
+ # Construct status line
+ parsed = urllib.parse.urlparse(request.uri)
+ request_line = urllib.parse.urlunparse(
+ ("", "", parsed.path, parsed.params, parsed.query, "")
+ )
+ status_line = request.method + " " + request_line + " HTTP/1.1\n"
+ major, minor = request.headers.get("content-type", "application/json").split(
+ "/"
+ )
+ msg = MIMENonMultipart(major, minor)
+ headers = request.headers.copy()
+
+ if request.http is not None:
+ credentials = _auth.get_credentials_from_http(request.http)
+ if credentials is not None:
+ _auth.apply_credentials(credentials, headers)
+
+ # MIMENonMultipart adds its own Content-Type header.
+ if "content-type" in headers:
+ del headers["content-type"]
+
+ for key, value in headers.items():
+ msg[key] = value
+ msg["Host"] = parsed.netloc
+ msg.set_unixfrom(None)
+
+ if request.body is not None:
+ msg.set_payload(request.body)
+ msg["content-length"] = str(len(request.body))
+
+ # Serialize the mime message.
+ fp = io.StringIO()
+ # maxheaderlen=0 means don't line wrap headers.
+ g = Generator(fp, maxheaderlen=0)
+ g.flatten(msg, unixfrom=False)
+ body = fp.getvalue()
+
+ return status_line + body
+
+ def _deserialize_response(self, payload):
+ """Convert string into httplib2 response and content.
+
+ Args:
+ payload: string, headers and body as a string.
+
+ Returns:
+ A pair (resp, content), such as would be returned from httplib2.request.
+ """
+ # Strip off the status line
+ status_line, payload = payload.split("\n", 1)
+ protocol, status, reason = status_line.split(" ", 2)
+
+ # Parse the rest of the response
+ parser = FeedParser()
+ parser.feed(payload)
+ msg = parser.close()
+ msg["status"] = status
+
+ # Create httplib2.Response from the parsed headers.
+ resp = httplib2.Response(msg)
+ resp.reason = reason
+ resp.version = int(protocol.split("/", 1)[1].replace(".", ""))
+
+ content = payload.split("\r\n\r\n", 1)[1]
+
+ return resp, content
+
+ def _new_id(self):
+ """Create a new id.
+
+ Auto incrementing number that avoids conflicts with ids already used.
+
+ Returns:
+ string, a new unique id.
+ """
+ self._last_auto_id += 1
+ while str(self._last_auto_id) in self._requests:
+ self._last_auto_id += 1
+ return str(self._last_auto_id)
+
+ @util.positional(2)
+ def add(self, request, callback=None, request_id=None):
+ """Add a new request.
+
+ Every callback added will be paired with a unique id, the request_id. That
+ unique id will be passed back to the callback when the response comes back
+ from the server. The default behavior is to have the library generate its
+ own unique id. If the caller passes in a request_id then they must ensure
+ uniqueness for each request_id; if a duplicate is supplied, an exception is
+ raised. Callers should either supply all request_ids or never supply a
+ request id, to avoid such an error.
+
+ Args:
+ request: HttpRequest, Request to add to the batch.
+ callback: callable, A callback to be called for this response, of the
+ form callback(id, response, exception). The first parameter is the
+ request id, and the second is the deserialized response object. The
+ third is an googleapiclient.errors.HttpError exception object if an HTTP error
+ occurred while processing the request, or None if no errors occurred.
+ request_id: string, A unique id for the request. The id will be passed
+ to the callback with the response.
+
+ Returns:
+ None
+
+ Raises:
+ BatchError if a media request is added to a batch.
+ KeyError if the request_id is not unique.
+ """
+
+ if len(self._order) >= MAX_BATCH_LIMIT:
+ raise BatchError(
+ "Exceeded the maximum calls(%d) in a single batch request."
+ % MAX_BATCH_LIMIT
+ )
+ if request_id is None:
+ request_id = self._new_id()
+ if request.resumable is not None:
+ raise BatchError("Media requests cannot be used in a batch request.")
+ if request_id in self._requests:
+ raise KeyError("A request with this ID already exists: %s" % request_id)
+ self._requests[request_id] = request
+ self._callbacks[request_id] = callback
+ self._order.append(request_id)
+
+ def _execute(self, http, order, requests):
+ """Serialize batch request, send to server, process response.
+
+ Args:
+ http: httplib2.Http, an http object to be used to make the request with.
+ order: list, list of request ids in the order they were added to the
+ batch.
+ requests: list, list of request objects to send.
+
+ Raises:
+ httplib2.HttpLib2Error if a transport error has occurred.
+ googleapiclient.errors.BatchError if the response is the wrong format.
+ """
+ message = MIMEMultipart("mixed")
+ # Message should not write out its own headers.
+ setattr(message, "_write_headers", lambda self: None)
+
+ # Add all the individual requests.
+ for request_id in order:
+ request = requests[request_id]
+
+ msg = MIMENonMultipart("application", "http")
+ msg["Content-Transfer-Encoding"] = "binary"
+ msg["Content-ID"] = self._id_to_header(request_id)
+
+ body = self._serialize_request(request)
+ msg.set_payload(body)
+ message.attach(msg)
+
+ # encode the body: note that we can't use `as_string`, because
+ # it plays games with `From ` lines.
+ fp = io.StringIO()
+ g = Generator(fp, mangle_from_=False)
+ g.flatten(message, unixfrom=False)
+ body = fp.getvalue()
+
+ headers = {}
+ headers["content-type"] = (
+ "multipart/mixed; " 'boundary="%s"'
+ ) % message.get_boundary()
+
+ resp, content = http.request(
+ self._batch_uri, method="POST", body=body, headers=headers
+ )
+
+ if resp.status >= 300:
+ raise HttpError(resp, content, uri=self._batch_uri)
+
+ # Prepend with a content-type header so FeedParser can handle it.
+ header = "content-type: %s\r\n\r\n" % resp["content-type"]
+ # PY3's FeedParser only accepts unicode. So we should decode content
+ # here, and encode each payload again.
+ content = content.decode("utf-8")
+ for_parser = header + content
+
+ parser = FeedParser()
+ parser.feed(for_parser)
+ mime_response = parser.close()
+
+ if not mime_response.is_multipart():
+ raise BatchError(
+ "Response not in multipart/mixed format.", resp=resp, content=content
+ )
+
+ for part in mime_response.get_payload():
+ request_id = self._header_to_id(part["Content-ID"])
+ response, content = self._deserialize_response(part.get_payload())
+ # We encode content here to emulate normal http response.
+ if isinstance(content, str):
+ content = content.encode("utf-8")
+ self._responses[request_id] = (response, content)
+
+ @util.positional(1)
+ def execute(self, http=None):
+ """Execute all the requests as a single batched HTTP request.
+
+ Args:
+ http: httplib2.Http, an http object to be used in place of the one the
+ HttpRequest request object was constructed with. If one isn't supplied
+ then use a http object from the requests in this batch.
+
+ Returns:
+ None
+
+ Raises:
+ httplib2.HttpLib2Error if a transport error has occurred.
+ googleapiclient.errors.BatchError if the response is the wrong format.
+ """
+ # If we have no requests return
+ if len(self._order) == 0:
+ return None
+
+ # If http is not supplied use the first valid one given in the requests.
+ if http is None:
+ for request_id in self._order:
+ request = self._requests[request_id]
+ if request is not None:
+ http = request.http
+ break
+
+ if http is None:
+ raise ValueError("Missing a valid http object.")
+
+ # Special case for OAuth2Credentials-style objects which have not yet been
+ # refreshed with an initial access_token.
+ creds = _auth.get_credentials_from_http(http)
+ if creds is not None:
+ if not _auth.is_valid(creds):
+ LOGGER.info("Attempting refresh to obtain initial access_token")
+ _auth.refresh_credentials(creds)
+
+ self._execute(http, self._order, self._requests)
+
+ # Loop over all the requests and check for 401s. For each 401 request the
+ # credentials should be refreshed and then sent again in a separate batch.
+ redo_requests = {}
+ redo_order = []
+
+ for request_id in self._order:
+ resp, content = self._responses[request_id]
+ if resp["status"] == "401":
+ redo_order.append(request_id)
+ request = self._requests[request_id]
+ self._refresh_and_apply_credentials(request, http)
+ redo_requests[request_id] = request
+
+ if redo_requests:
+ self._execute(http, redo_order, redo_requests)
+
+ # Now process all the callbacks. A non-2xx response is converted into an
+ # HttpError and handed to the callback via the exception parameter.
+
+ for request_id in self._order:
+ resp, content = self._responses[request_id]
+
+ request = self._requests[request_id]
+ callback = self._callbacks[request_id]
+
+ response = None
+ exception = None
+ try:
+ if resp.status >= 300:
+ raise HttpError(resp, content, uri=request.uri)
+ response = request.postproc(resp, content)
+ except HttpError as e:
+ exception = e
+
+ if callback is not None:
+ callback(request_id, response, exception)
+ if self._callback is not None:
+ self._callback(request_id, response, exception)
+
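+
+# Editor's sketch: queueing requests with explicit ids. Per-request
+# callbacks receive (request_id, response, exception); exception is an
+# HttpError for non-2xx responses, else None. `service` stands in for a
+# hypothetical client returned by googleapiclient.discovery.build().
+def _example_batch(service):
+    def on_animals(request_id, response, exception):
+        if exception is None:
+            print(request_id, response)
+    batch = BatchHttpRequest(batch_uri="https://example.com/batch/farm/v2")
+    batch.add(service.animals().list(), callback=on_animals,
+              request_id="animals")
+    batch.execute()
+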
+
+class HttpRequestMock(object):
+ """Mock of HttpRequest.
+
+ Do not construct directly, instead use RequestMockBuilder.
+ """
+
+ def __init__(self, resp, content, postproc):
+ """Constructor for HttpRequestMock
+
+ Args:
+ resp: httplib2.Response, the response to emulate coming from the request
+ content: string, the response body
+ postproc: callable, the post processing function usually supplied by
+ the model class. See model.JsonModel.response() as an example.
+ """
+ self.resp = resp
+ self.content = content
+ self.postproc = postproc
+ if resp is None:
+ self.resp = httplib2.Response({"status": 200, "reason": "OK"})
+ if "reason" in self.resp:
+ self.resp.reason = self.resp["reason"]
+
+ def execute(self, http=None):
+ """Execute the request.
+
+ Same behavior as HttpRequest.execute(), but the response is
+ mocked and not really from an HTTP request/response.
+ """
+ return self.postproc(self.resp, self.content)
+
+
+class RequestMockBuilder(object):
+ """A simple mock of HttpRequest
+
+ Pass in a dictionary to the constructor that maps request methodIds to
+ tuples of (httplib2.Response, content, opt_expected_body) that should be
+ returned when that method is called. None may also be passed in for the
+ httplib2.Response, in which case a 200 OK response will be generated.
+ If an opt_expected_body (str or dict) is provided, it will be compared to
+ the body and UnexpectedBodyError will be raised on inequality.
+
+ Example:
+ response = '{"data": {"id": "tag:google.c...'
+ requestBuilder = RequestMockBuilder(
+ {
+ 'plus.activities.get': (None, response),
+ }
+ )
+ googleapiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder)
+
+ Methods that you do not supply a response for will return a
+ 200 OK with an empty string as the response content or raise an exception
+ if check_unexpected is set to True. The methodId is taken from the rpcName
+ in the discovery document.
+
+ For more details see the project wiki.
+ """
+
+ def __init__(self, responses, check_unexpected=False):
+ """Constructor for RequestMockBuilder
+
+ The constructed object should be a callable object
+ that can replace the class HttpRequest.
+
+ responses - A dictionary that maps methodIds into tuples
+ of (httplib2.Response, content). The methodId
+ comes from the 'rpcName' field in the discovery
+ document.
+ check_unexpected - A boolean setting whether or not UnexpectedMethodError
+ should be raised on unsupplied method.
+ """
+ self.responses = responses
+ self.check_unexpected = check_unexpected
+
+ def __call__(
+ self,
+ http,
+ postproc,
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ methodId=None,
+ resumable=None,
+ ):
+ """Implements the callable interface that discovery.build() expects
+ of requestBuilder, which is to build an object compatible with
+ HttpRequest.execute(). See that method for the description of the
+ parameters and the expected response.
+ """
+ if methodId in self.responses:
+ response = self.responses[methodId]
+ resp, content = response[:2]
+ if len(response) > 2:
+ # Test the body against the supplied expected_body.
+ expected_body = response[2]
+ if bool(expected_body) != bool(body):
+ # Not expecting a body and provided one
+ # or expecting a body and not provided one.
+ raise UnexpectedBodyError(expected_body, body)
+ if isinstance(expected_body, str):
+ expected_body = json.loads(expected_body)
+ body = json.loads(body)
+ if body != expected_body:
+ raise UnexpectedBodyError(expected_body, body)
+ return HttpRequestMock(resp, content, postproc)
+ elif self.check_unexpected:
+ raise UnexpectedMethodError(methodId=methodId)
+ else:
+ model = JsonModel(False)
+ return HttpRequestMock(None, "{}", model.response)
+
+
+class HttpMock(object):
+ """Mock of httplib2.Http"""
+
+ def __init__(self, filename=None, headers=None):
+ """
+ Args:
+ filename: string, absolute filename to read response from
+ headers: dict, header to return with response
+ """
+ if headers is None:
+ headers = {"status": "200"}
+ if filename:
+ with open(filename, "rb") as f:
+ self.data = f.read()
+ else:
+ self.data = None
+ self.response_headers = headers
+ self.headers = None
+ self.uri = None
+ self.method = None
+ self.body = None
+
+ def request(
+ self,
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ redirections=1,
+ connection_type=None,
+ ):
+ self.uri = uri
+ self.method = method
+ self.body = body
+ self.headers = headers
+ return httplib2.Response(self.response_headers), self.data
+
+ def close(self):
+ return None
+
+
+class HttpMockSequence(object):
+ """Mock of httplib2.Http
+
+ Mocks a sequence of calls to request returning different responses for each
+ call. Create an instance initialized with the desired response headers
+ and content and then use as if an httplib2.Http instance.
+
+ http = HttpMockSequence([
+ ({'status': '401'}, ''),
+ ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'),
+ ({'status': '200'}, 'echo_request_headers'),
+ ])
+ resp, content = http.request("http://examples.com")
+
+ There are special values you can pass in for content to trigger
+ behaviours that are helpful in testing.
+
+ 'echo_request_headers' means return the request headers in the response body
+ 'echo_request_headers_as_json' means return the request headers, encoded
+ as JSON, in the response body
+ 'echo_request_body' means return the request body in the response body
+ 'echo_request_uri' means return the request uri in the response body
+ """
+
+ def __init__(self, iterable):
+ """
+ Args:
+ iterable: iterable, a sequence of pairs of (headers, body)
+ """
+ self._iterable = iterable
+ self.follow_redirects = True
+ self.request_sequence = list()
+
+ def request(
+ self,
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ redirections=1,
+ connection_type=None,
+ ):
+ # Remember the request so after the fact this mock can be examined
+ self.request_sequence.append((uri, method, body, headers))
+ resp, content = self._iterable.pop(0)
+ if isinstance(content, str):
+ content = content.encode("utf-8")
+
+ if content == b"echo_request_headers":
+ content = headers
+ elif content == b"echo_request_headers_as_json":
+ content = json.dumps(headers)
+ elif content == b"echo_request_body":
+ if hasattr(body, "read"):
+ content = body.read()
+ else:
+ content = body
+ elif content == b"echo_request_uri":
+ content = uri
+ if isinstance(content, str):
+ content = content.encode("utf-8")
+ return httplib2.Response(resp), content
+
+
+def set_user_agent(http, user_agent):
+ """Set the user-agent on every request.
+
+ Args:
+ http - An instance of httplib2.Http
+ or something that acts like it.
+ user_agent: string, the value for the user-agent header.
+
+ Returns:
+ A modified instance of http that was passed in.
+
+ Example:
+
+ h = httplib2.Http()
+ h = set_user_agent(h, "my-app-name/6.0")
+
+ Most of the time the user-agent will be set doing auth, this is for the rare
+ cases where you are accessing an unauthenticated endpoint.
+ """
+ request_orig = http.request
+
+ # The closure that will replace 'httplib2.Http.request'.
+ def new_request(
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+ connection_type=None,
+ ):
+ """Modify the request headers to add the user-agent."""
+ if headers is None:
+ headers = {}
+ if "user-agent" in headers:
+ headers["user-agent"] = user_agent + " " + headers["user-agent"]
+ else:
+ headers["user-agent"] = user_agent
+ resp, content = request_orig(
+ uri,
+ method=method,
+ body=body,
+ headers=headers,
+ redirections=redirections,
+ connection_type=connection_type,
+ )
+ return resp, content
+
+ http.request = new_request
+ return http
+
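+
+# Editor's sketch: verifying the user-agent wrapper with the
+# echo_request_headers mock defined earlier in this module. The mocked
+# response body is the request headers dict, so the injected header can be
+# asserted directly.
+def _example_set_user_agent():
+    http = HttpMockSequence([({"status": "200"}, "echo_request_headers")])
+    http = set_user_agent(http, "my-app/1.0")
+    resp, content = http.request("http://example.com")
+    assert content["user-agent"] == "my-app/1.0"
+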
+
+def tunnel_patch(http):
+ """Tunnel PATCH requests over POST.
+ Args:
+ http - An instance of httplib2.Http
+ or something that acts like it.
+
+ Returns:
+ A modified instance of http that was passed in.
+
+ Example:
+
+ h = httplib2.Http()
+ h = tunnel_patch(h)
+
+ Useful if you are running on a platform that doesn't support PATCH.
+ Apply this last if you are using OAuth 1.0, as changing the method
+ will result in a different signature.
+ """
+ request_orig = http.request
+
+ # The closure that will replace 'httplib2.Http.request'.
+ def new_request(
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+ connection_type=None,
+ ):
+ """Modify the request headers to add the user-agent."""
+ if headers is None:
+ headers = {}
+ if method == "PATCH":
+ if "oauth_token" in headers.get("authorization", ""):
+ LOGGER.warning(
+ "OAuth 1.0 request made with Credentials after tunnel_patch."
+ )
+ headers["x-http-method-override"] = "PATCH"
+ method = "POST"
+ resp, content = request_orig(
+ uri,
+ method=method,
+ body=body,
+ headers=headers,
+ redirections=redirections,
+ connection_type=connection_type,
+ )
+ return resp, content
+
+ http.request = new_request
+ return http
+
+
+def build_http():
+ """Builds httplib2.Http object
+
+ Returns:
+ An httplib2.Http object, which is used to make http requests, and which has a timeout set by default.
+ To override default timeout call
+
+ socket.setdefaulttimeout(timeout_in_sec)
+
+ before calling this function.
+ """
+ if socket.getdefaulttimeout() is not None:
+ http_timeout = socket.getdefaulttimeout()
+ else:
+ http_timeout = DEFAULT_HTTP_TIMEOUT_SEC
+ http = httplib2.Http(timeout=http_timeout)
+ # 308's are used by several Google APIs (Drive, YouTube)
+ # for Resumable Uploads rather than Permanent Redirects.
+ # This asks httplib2 to exclude 308s from the status codes
+ # it treats as redirects
+ try:
+ http.redirect_codes = http.redirect_codes - {308}
+ except AttributeError:
+ # Apache Beam tests depend on this library and cannot
+ # currently upgrade their httplib2 version
+ # http.redirect_codes does not exist in previous versions
+ # of httplib2, so pass
+ pass
+
+ return http
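+
+
+# Editor's sketch: build_http() honours a process-wide default socket
+# timeout installed beforehand. Note setdefaulttimeout() affects every new
+# socket in the process, so reset it afterwards in real code.
+def _example_build_http_timeout():
+    socket.setdefaulttimeout(120)
+    http = build_http()
+    assert http.timeout == 120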
diff --git a/Lib/site-packages/googleapiclient/mimeparse.py b/Lib/site-packages/googleapiclient/mimeparse.py
new file mode 100644
index 0000000..d3dedee
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/mimeparse.py
@@ -0,0 +1,183 @@
+# Copyright 2014 Joe Gregorio
+#
+# Licensed under the MIT License
+
+"""MIME-Type Parser
+
+This module provides basic functions for handling mime-types. It can handle
+matching mime-types against a list of media-ranges. See section 14.1 of the
+HTTP specification [RFC 2616] for a complete explanation.
+
+ http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
+
+Contents:
+ - parse_mime_type(): Parses a mime-type into its component parts.
+ - parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q'
+ quality parameter.
+ - quality(): Determines the quality ('q') of a mime-type when
+ compared against a list of media-ranges.
+ - quality_parsed(): Just like quality() except the second parameter must be
+ pre-parsed.
+ - best_match(): Choose the mime-type with the highest quality ('q')
+ from a list of candidates.
+"""
+from __future__ import absolute_import
+
+from functools import reduce
+
+__version__ = "0.1.3"
+__author__ = "Joe Gregorio"
+__email__ = "joe@bitworking.org"
+__license__ = "MIT License"
+__credits__ = ""
+
+
+def parse_mime_type(mime_type):
+ """Parses a mime-type into its component parts.
+
+ Carves up a mime-type and returns a tuple of the (type, subtype, params)
+ where 'params' is a dictionary of all the parameters for the media range.
+ For example, the media range 'application/xhtml;q=0.5' would get parsed
+ into:
+
+ ('application', 'xhtml', {'q': '0.5'})
+ """
+ parts = mime_type.split(";")
+ params = dict(
+ [tuple([s.strip() for s in param.split("=", 1)]) for param in parts[1:]]
+ )
+ full_type = parts[0].strip()
+ # Java URLConnection class sends an Accept header that includes a
+ # single '*'. Turn it into a legal wildcard.
+ if full_type == "*":
+ full_type = "*/*"
+ (type, subtype) = full_type.split("/")
+
+ return (type.strip(), subtype.strip(), params)
+
+
+def parse_media_range(range):
+ """Parse a media-range into its component parts.
+
+ Carves up a media range and returns a tuple of the (type, subtype,
+ params) where 'params' is a dictionary of all the parameters for the media
+ range. For example, the media range 'application/*;q=0.5' would get parsed
+ into:
+
+ ('application', '*', {'q': '0.5'})
+
+ In addition this function also guarantees that there is a value for 'q'
+ in the params dictionary, filling it in with a proper default if
+ necessary.
+ """
+ (type, subtype, params) = parse_mime_type(range)
+ if (
+ "q" not in params
+ or not params["q"]
+ or not float(params["q"])
+ or float(params["q"]) > 1
+ or float(params["q"]) < 0
+ ):
+ params["q"] = "1"
+
+ return (type, subtype, params)
+
+
+def fitness_and_quality_parsed(mime_type, parsed_ranges):
+ """Find the best match for a mime-type amongst parsed media-ranges.
+
+ Find the best match for a given mime-type against a list of media_ranges
+ that have already been parsed by parse_media_range(). Returns a tuple of
+ the fitness value and the value of the 'q' quality parameter of the best
+ match, or (-1, 0) if no match was found. Just as for quality_parsed(),
+ 'parsed_ranges' must be a list of parsed media ranges.
+ """
+ best_fitness = -1
+ best_fit_q = 0
+ (target_type, target_subtype, target_params) = parse_media_range(mime_type)
+ for (type, subtype, params) in parsed_ranges:
+ type_match = type == target_type or type == "*" or target_type == "*"
+ subtype_match = (
+ subtype == target_subtype or subtype == "*" or target_subtype == "*"
+ )
+ if type_match and subtype_match:
+ param_matches = reduce(
+ lambda x, y: x + y,
+ [
+ 1
+ for (key, value) in target_params.items()
+ if key != "q" and key in params and value == params[key]
+ ],
+ 0,
+ )
+ fitness = 100 if type == target_type else 0
+ fitness += 10 if subtype == target_subtype else 0
+ fitness += param_matches
+ if fitness > best_fitness:
+ best_fitness = fitness
+ best_fit_q = params["q"]
+
+ return best_fitness, float(best_fit_q)
+
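+
+# Editor's sketch of the scoring rule above: an exact type match is worth
+# 100, an exact subtype match 10, plus 1 per matching parameter; the 'q'
+# of the highest-fitness range is returned alongside the fitness.
+def _example_fitness_and_quality():
+    ranges = [parse_media_range(r)
+              for r in "text/*;q=0.3, text/html;q=0.7".split(",")]
+    assert fitness_and_quality_parsed("text/html", ranges) == (110, 0.7)
+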
+
+def quality_parsed(mime_type, parsed_ranges):
+ """Find the best match for a mime-type amongst parsed media-ranges.
+
+ Find the best match for a given mime-type against a list of media_ranges
+ that have already been parsed by parse_media_range(). Returns the 'q'
+ quality parameter of the best match, or 0 if no match was found. This function
+ behaves the same as quality() except that 'parsed_ranges' must be a list of
+ parsed media ranges.
+ """
+
+ return fitness_and_quality_parsed(mime_type, parsed_ranges)[1]
+
+
+def quality(mime_type, ranges):
+ """Return the quality ('q') of a mime-type against a list of media-ranges.
+
+ Returns the quality 'q' of a mime-type when compared against the
+ media-ranges in ranges. For example:
+
+ >>> quality('text/html','text/*;q=0.3, text/html;q=0.7,
+ text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5')
+ 0.7
+
+ """
+ parsed_ranges = [parse_media_range(r) for r in ranges.split(",")]
+
+ return quality_parsed(mime_type, parsed_ranges)
+
+
+def best_match(supported, header):
+ """Return mime-type with the highest quality ('q') from list of candidates.
+
+ Takes a list of supported mime-types and finds the best match for all the
+ media-ranges listed in header. The value of header must be a string that
+ conforms to the format of the HTTP Accept: header. The value of 'supported'
+ is a list of mime-types. The list of supported mime-types should be sorted
+ in order of increasing desirability, in case of a situation where there is
+ a tie.
+
+ >>> best_match(['application/xbel+xml', 'text/xml'],
+ 'text/*;q=0.5,*/*; q=0.1')
+ 'text/xml'
+ """
+ split_header = _filter_blank(header.split(","))
+ parsed_header = [parse_media_range(r) for r in split_header]
+ weighted_matches = []
+ pos = 0
+ for mime_type in supported:
+ weighted_matches.append(
+ (fitness_and_quality_parsed(mime_type, parsed_header), pos, mime_type)
+ )
+ pos += 1
+ weighted_matches.sort()
+
+ return weighted_matches[-1][2] if weighted_matches[-1][0][1] else ""
+
+
+def _filter_blank(i):
+ for s in i:
+ if s.strip():
+ yield s
diff --git a/Lib/site-packages/googleapiclient/model.py b/Lib/site-packages/googleapiclient/model.py
new file mode 100644
index 0000000..4ba2752
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/model.py
@@ -0,0 +1,409 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Model objects for requests and responses.
+
+Each API may support one or more serializations, such
+as JSON, Atom, etc. The model classes are responsible
+for converting between the wire format and the Python
+object representation.
+"""
+from __future__ import absolute_import
+
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+
+import json
+import logging
+import platform
+import urllib
+
+from googleapiclient import version as googleapiclient_version
+from googleapiclient.errors import HttpError
+
+_LIBRARY_VERSION = googleapiclient_version.__version__
+_PY_VERSION = platform.python_version()
+
+LOGGER = logging.getLogger(__name__)
+
+dump_request_response = False
+
+
+def _abstract():
+ raise NotImplementedError("You need to override this function")
+
+
+class Model(object):
+ """Model base class.
+
+ All Model classes should implement this interface.
+ The Model serializes and de-serializes between a wire
+ format such as JSON and a Python object representation.
+ """
+
+ def request(self, headers, path_params, query_params, body_value):
+ """Updates outgoing requests with a serialized body.
+
+ Args:
+ headers: dict, request headers
+ path_params: dict, parameters that appear in the request path
+ query_params: dict, parameters that appear in the query
+ body_value: object, the request body as a Python object, which must be
+ serializable.
+ Returns:
+ A tuple of (headers, path_params, query, body)
+
+ headers: dict, request headers
+ path_params: dict, parameters that appear in the request path
+ query: string, query part of the request URI
+ body: string, the body serialized in the desired wire format.
+ """
+ _abstract()
+
+ def response(self, resp, content):
+ """Convert the response wire format into a Python object.
+
+ Args:
+ resp: httplib2.Response, the HTTP response headers and status
+ content: string, the body of the HTTP response
+
+ Returns:
+ The body de-serialized as a Python object.
+
+ Raises:
+ googleapiclient.errors.HttpError if a non 2xx response is received.
+ """
+ _abstract()
+
+
+class BaseModel(Model):
+ """Base model class.
+
+ Subclasses should provide implementations for the "serialize" and
+ "deserialize" methods, as well as values for the following class attributes.
+
+ Attributes:
+ accept: The value to use for the HTTP Accept header.
+ content_type: The value to use for the HTTP Content-type header.
+ no_content_response: The value to return when deserializing a 204 "No
+ Content" response.
+ alt_param: The value to supply as the "alt" query parameter for requests.
+ """
+
+ accept = None
+ content_type = None
+ no_content_response = None
+ alt_param = None
+
+ def _log_request(self, headers, path_params, query, body):
+ """Logs debugging information about the request if requested."""
+ if dump_request_response:
+ LOGGER.info("--request-start--")
+ LOGGER.info("-headers-start-")
+ for h, v in headers.items():
+ LOGGER.info("%s: %s", h, v)
+ LOGGER.info("-headers-end-")
+ LOGGER.info("-path-parameters-start-")
+ for h, v in path_params.items():
+ LOGGER.info("%s: %s", h, v)
+ LOGGER.info("-path-parameters-end-")
+ LOGGER.info("body: %s", body)
+ LOGGER.info("query: %s", query)
+ LOGGER.info("--request-end--")
+
+ def request(self, headers, path_params, query_params, body_value):
+ """Updates outgoing requests with a serialized body.
+
+ Args:
+ headers: dict, request headers
+ path_params: dict, parameters that appear in the request path
+ query_params: dict, parameters that appear in the query
+ body_value: object, the request body as a Python object, which must be
+ serializable by json.
+ Returns:
+ A tuple of (headers, path_params, query, body)
+
+ headers: dict, request headers
+ path_params: dict, parameters that appear in the request path
+ query: string, query part of the request URI
+ body: string, the body serialized as JSON
+ """
+ query = self._build_query(query_params)
+ headers["accept"] = self.accept
+ headers["accept-encoding"] = "gzip, deflate"
+ if "user-agent" in headers:
+ headers["user-agent"] += " "
+ else:
+ headers["user-agent"] = ""
+ headers["user-agent"] += "(gzip)"
+ if "x-goog-api-client" in headers:
+ headers["x-goog-api-client"] += " "
+ else:
+ headers["x-goog-api-client"] = ""
+ headers["x-goog-api-client"] += "gdcl/%s gl-python/%s" % (
+ _LIBRARY_VERSION,
+ _PY_VERSION,
+ )
+
+ if body_value is not None:
+ headers["content-type"] = self.content_type
+ body_value = self.serialize(body_value)
+ self._log_request(headers, path_params, query, body_value)
+ return (headers, path_params, query, body_value)
+
+ def _build_query(self, params):
+ """Builds a query string.
+
+ Args:
+ params: dict, the query parameters
+
+ Returns:
+ The query parameters properly encoded into an HTTP URI query string.
+ """
+ if self.alt_param is not None:
+ params.update({"alt": self.alt_param})
+ astuples = []
+ for key, value in params.items():
+ if isinstance(value, list):
+ for x in value:
+ x = x.encode("utf-8")
+ astuples.append((key, x))
+ else:
+ if isinstance(value, str):
+ value = value.encode("utf-8")
+ astuples.append((key, value))
+ return "?" + urllib.parse.urlencode(astuples)
+
+ def _log_response(self, resp, content):
+ """Logs debugging information about the response if requested."""
+ if dump_request_response:
+ LOGGER.info("--response-start--")
+ for h, v in resp.items():
+ LOGGER.info("%s: %s", h, v)
+ if content:
+ LOGGER.info(content)
+ LOGGER.info("--response-end--")
+
+ def response(self, resp, content):
+ """Convert the response wire format into a Python object.
+
+ Args:
+ resp: httplib2.Response, the HTTP response headers and status
+ content: string, the body of the HTTP response
+
+ Returns:
+ The body de-serialized as a Python object.
+
+ Raises:
+ googleapiclient.errors.HttpError if a non 2xx response is received.
+ """
+ self._log_response(resp, content)
+ # Error handling is TBD, for example, do we retry
+ # for some operation/error combinations?
+ if resp.status < 300:
+ if resp.status == 204:
+ # A 204: No Content response should be treated differently
+ # to all the other success states
+ return self.no_content_response
+ return self.deserialize(content)
+ else:
+ LOGGER.debug("Content from bad request was: %r" % content)
+ raise HttpError(resp, content)
+
+ def serialize(self, body_value):
+ """Perform the actual Python object serialization.
+
+ Args:
+ body_value: object, the request body as a Python object.
+
+ Returns:
+ string, the body in serialized form.
+ """
+ _abstract()
+
+ def deserialize(self, content):
+ """Perform the actual deserialization from response string to Python
+ object.
+
+ Args:
+ content: string, the body of the HTTP response
+
+ Returns:
+ The body de-serialized as a Python object.
+ """
+ _abstract()
+
+
+class JsonModel(BaseModel):
+ """Model class for JSON.
+
+ Serializes and de-serializes between JSON and the Python
+ object representation of HTTP request and response bodies.
+ """
+
+ accept = "application/json"
+ content_type = "application/json"
+ alt_param = "json"
+
+ def __init__(self, data_wrapper=False):
+ """Construct a JsonModel.
+
+ Args:
+ data_wrapper: boolean, wrap requests and responses in a data wrapper
+ """
+ self._data_wrapper = data_wrapper
+
+ def serialize(self, body_value):
+ if (
+ isinstance(body_value, dict)
+ and "data" not in body_value
+ and self._data_wrapper
+ ):
+ body_value = {"data": body_value}
+ return json.dumps(body_value)
+
+ def deserialize(self, content):
+ try:
+ content = content.decode("utf-8")
+ except AttributeError:
+ pass
+ try:
+ body = json.loads(content)
+ except json.decoder.JSONDecodeError:
+ body = content
+ else:
+ if self._data_wrapper and "data" in body:
+ body = body["data"]
+ return body
+
+ @property
+ def no_content_response(self):
+ return {}
+
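A quick round-trip sketch of the JsonModel above (an editorial illustration, not part of the patched file); it relies only on the serialize/deserialize behavior shown in the diff:

```python
from googleapiclient.model import JsonModel

model = JsonModel(data_wrapper=True)

# serialize wraps the dict because data_wrapper=True and "data" is absent.
wire = model.serialize({"title": "hello"})
assert wire == '{"data": {"title": "hello"}}'

# deserialize strips the wrapper again on the way back out.
assert model.deserialize(wire) == {"title": "hello"}
```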
+
+class RawModel(JsonModel):
+ """Model class for requests that don't return JSON.
+
+ Serializes the Python object representation of the HTTP request body
+ to JSON, and returns the raw bytes of the response body rather than
+ deserializing it.
+ """
+
+ accept = "*/*"
+ content_type = "application/json"
+ alt_param = None
+
+ def deserialize(self, content):
+ return content
+
+ @property
+ def no_content_response(self):
+ return ""
+
+
+class MediaModel(JsonModel):
+ """Model class for requests that return Media.
+
+ Serializes the Python object representation of the HTTP request body
+ to JSON, and returns the raw bytes of the media response body.
+ """
+
+ accept = "*/*"
+ content_type = "application/json"
+ alt_param = "media"
+
+ def deserialize(self, content):
+ return content
+
+ @property
+ def no_content_response(self):
+ return ""
+
+
+class ProtocolBufferModel(BaseModel):
+ """Model class for protocol buffers.
+
+ Serializes and de-serializes the binary protocol buffer sent in the HTTP
+ request and response bodies.
+ """
+
+ accept = "application/x-protobuf"
+ content_type = "application/x-protobuf"
+ alt_param = "proto"
+
+ def __init__(self, protocol_buffer):
+ """Constructs a ProtocolBufferModel.
+
+ The serialized protocol buffer returned in an HTTP response will be
+ de-serialized using the given protocol buffer class.
+
+ Args:
+ protocol_buffer: The protocol buffer class used to de-serialize a
+ response from the API.
+ """
+ self._protocol_buffer = protocol_buffer
+
+ def serialize(self, body_value):
+ return body_value.SerializeToString()
+
+ def deserialize(self, content):
+ return self._protocol_buffer.FromString(content)
+
+ @property
+ def no_content_response(self):
+ return self._protocol_buffer()
+
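For the protocol buffer variant, a similar sketch (editorial, not part of the file) using the Timestamp message that ships with the protobuf package:

```python
from google.protobuf.timestamp_pb2 import Timestamp

from googleapiclient.model import ProtocolBufferModel

model = ProtocolBufferModel(Timestamp)

# serialize delegates to the message's SerializeToString().
wire = model.serialize(Timestamp(seconds=1700000000))

# deserialize parses the wire bytes back with Timestamp.FromString().
assert model.deserialize(wire).seconds == 1700000000

# A 204 response maps to an empty message instance.
assert model.no_content_response == Timestamp()
```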
+
+def makepatch(original, modified):
+ """Create a patch object.
+
+ Some methods support PATCH, an efficient way to send updates to a resource.
+ This function allows the easy construction of patch bodies by looking at
+ the differences between a resource before and after it was modified.
+
+ Args:
+ original: object, the original deserialized resource
+ modified: object, the modified deserialized resource
+ Returns:
+ An object that contains only the changes from original to modified, in a
+ form suitable to pass to a PATCH method.
+
+ Example usage:
+ item = service.activities().get(postid=postid, userid=userid).execute()
+ original = copy.deepcopy(item)
+ item['object']['content'] = 'This is updated.'
+ service.activities().patch(postid=postid, userid=userid,
+ body=makepatch(original, item)).execute()
+ """
+ patch = {}
+ for key, original_value in original.items():
+ modified_value = modified.get(key, None)
+ if modified_value is None:
+ # Use None to signal that the element is deleted
+ patch[key] = None
+ elif original_value != modified_value:
+ if isinstance(original_value, dict):
+ # Recursively descend objects
+ patch[key] = makepatch(original_value, modified_value)
+ else:
+ # In the case of simple types or arrays we just replace
+ patch[key] = modified_value
+ else:
+ # Don't add anything to patch if there's no change
+ pass
+ for key in modified:
+ if key not in original:
+ patch[key] = modified[key]
+
+ return patch
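To make the patch semantics concrete, a small self-contained example (an editorial note, not part of the diff):

```python
import copy

from googleapiclient.model import makepatch

original = {"title": "old", "labels": ["a"], "owner": {"name": "x", "id": 1}}
modified = copy.deepcopy(original)
modified["title"] = "new"        # changed scalar: replaced outright
del modified["labels"]           # removed key: signalled with None
modified["owner"]["name"] = "y"  # nested change: recursive sub-patch

assert makepatch(original, modified) == {
    "title": "new",
    "labels": None,
    "owner": {"name": "y"},
}
```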
diff --git a/Lib/site-packages/googleapiclient/sample_tools.py b/Lib/site-packages/googleapiclient/sample_tools.py
new file mode 100644
index 0000000..bdad0a2
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/sample_tools.py
@@ -0,0 +1,108 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for making samples.
+
+Consolidates a lot of code commonly repeated in sample applications.
+"""
+from __future__ import absolute_import
+
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+__all__ = ["init"]
+
+
+import argparse
+import os
+
+from googleapiclient import discovery
+from googleapiclient.http import build_http
+
+
+def init(
+ argv, name, version, doc, filename, scope=None, parents=[], discovery_filename=None
+):
+ """A common initialization routine for samples.
+
+ Many of the sample applications do the same initialization, which has now
+ been consolidated into this function. This function uses common idioms found
+ in almost all the samples, i.e. for an API with name 'apiname', the
+ credentials are stored in a file named apiname.dat, and the
+ client_secrets.json file is stored in the same directory as the application
+ main file.
+
+ Args:
+ argv: list of string, the command-line parameters of the application.
+ name: string, name of the API.
+ version: string, version of the API.
+ doc: string, description of the application. Usually set to __doc__.
+ filename: string, filename of the application. Usually set to __file__.
+ scope: string, The OAuth scope used.
+ parents: list of argparse.ArgumentParser, additional command-line flags.
+ discovery_filename: string, name of local discovery file (JSON). Use when discovery doc not available via URL.
+
+ Returns:
+ A tuple of (service, flags), where service is the service object and flags
+ is the parsed command-line flags.
+ """
+ try:
+ from oauth2client import client, file, tools
+ except ImportError:
+ raise ImportError(
+ "googleapiclient.sample_tools requires oauth2client. Please install oauth2client and try again."
+ )
+
+ if scope is None:
+ scope = "https://www.googleapis.com/auth/" + name
+
+ # Parse command-line arguments.
+ parent_parsers = [tools.argparser]
+ parent_parsers.extend(parents)
+ parser = argparse.ArgumentParser(
+ description=doc,
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ parents=parent_parsers,
+ )
+ flags = parser.parse_args(argv[1:])
+
+ # Name of a file containing the OAuth 2.0 information for this
+ # application, including client_id and client_secret, which are found
+ # on the API Access tab in the Google APIs Console.
+ client_secrets = os.path.join(os.path.dirname(filename), "client_secrets.json")
+
+ # Set up a Flow object to be used if we need to authenticate.
+ flow = client.flow_from_clientsecrets(
+ client_secrets, scope=scope, message=tools.message_if_missing(client_secrets)
+ )
+
+ # Prepare credentials, and authorize the HTTP object with them.
+ # If the credentials don't exist or are invalid, run through the native
+ # client flow. The Storage object will ensure that, if successful, the
+ # good credentials are written back to a file.
+ storage = file.Storage(name + ".dat")
+ credentials = storage.get()
+ if credentials is None or credentials.invalid:
+ credentials = tools.run_flow(flow, storage, flags)
+ http = credentials.authorize(http=build_http())
+
+ if discovery_filename is None:
+ # Construct a service object via the discovery service.
+ service = discovery.build(name, version, http=http)
+ else:
+ # Construct a service object using a local discovery document file.
+ with open(discovery_filename) as discovery_file:
+ service = discovery.build_from_document(
+ discovery_file.read(), base="https://www.googleapis.com/", http=http
+ )
+ return (service, flags)
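A hedged sketch of how a sample script would call init (the API name, version, and scope below are illustrative, not mandated by the function; oauth2client and a client_secrets.json next to the script are required, as the docstring notes):

```python
"""Prints the authorized user's display name."""
import sys

from googleapiclient import sample_tools


def main(argv):
    # "drive"/"v3" and the scope are example values for this sketch.
    service, flags = sample_tools.init(
        argv, "drive", "v3", __doc__, __file__,
        scope="https://www.googleapis.com/auth/drive.metadata.readonly",
    )
    about = service.about().get(fields="user").execute()
    print(about["user"]["displayName"])


if __name__ == "__main__":
    main(sys.argv)
```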
diff --git a/Lib/site-packages/googleapiclient/schema.py b/Lib/site-packages/googleapiclient/schema.py
new file mode 100644
index 0000000..93b07df
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/schema.py
@@ -0,0 +1,317 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Schema processing for discovery based APIs
+
+Schemas holds an APIs discovery schemas. It can return those schema as
+deserialized JSON objects, or pretty print them as prototype objects that
+conform to the schema.
+
+For example, given the schema:
+
+ schema = \"\"\"{
+ "Foo": {
+ "type": "object",
+ "properties": {
+ "etag": {
+ "type": "string",
+ "description": "ETag of the collection."
+ },
+ "kind": {
+ "type": "string",
+ "description": "Type of the collection ('calendar#acl').",
+ "default": "calendar#acl"
+ },
+ "nextPageToken": {
+ "type": "string",
+ "description": "Token used to access the next
+ page of this result. Omitted if no further results are available."
+ }
+ }
+ }
+ }\"\"\"
+
+ s = Schemas(schema)
+ print(s.prettyPrintByName('Foo'))
+
+ Produces the following output:
+
+ {
+ "nextPageToken": "A String", # Token used to access the
+ # next page of this result. Omitted if no further results are available.
+ "kind": "A String", # Type of the collection ('calendar#acl').
+ "etag": "A String", # ETag of the collection.
+ },
+
+The constructor takes a discovery document in which to look up named schema.
+"""
+from __future__ import absolute_import
+
+# TODO(jcgregorio) support format, enum, minimum, maximum
+
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+
+
+from collections import OrderedDict
+
+from googleapiclient import _helpers as util
+
+
+class Schemas(object):
+ """Schemas for an API."""
+
+ def __init__(self, discovery):
+ """Constructor.
+
+ Args:
+ discovery: object, Deserialized discovery document from which we pull
+ out the named schema.
+ """
+ self.schemas = discovery.get("schemas", {})
+
+ # Cache of pretty printed schemas.
+ self.pretty = {}
+
+ @util.positional(2)
+ def _prettyPrintByName(self, name, seen=None, dent=0):
+ """Get pretty printed object prototype from the schema name.
+
+ Args:
+ name: string, Name of schema in the discovery document.
+ seen: list of string, Names of schema already seen. Used to handle
+ recursive definitions.
+
+ Returns:
+ string, A string that contains a prototype object with
+ comments that conforms to the given schema.
+ """
+ if seen is None:
+ seen = []
+
+ if name in seen:
+ # Do not fall into an infinite loop over recursive definitions.
+ return "# Object with schema name: %s" % name
+ seen.append(name)
+
+ if name not in self.pretty:
+ self.pretty[name] = _SchemaToStruct(
+ self.schemas[name], seen, dent=dent
+ ).to_str(self._prettyPrintByName)
+
+ seen.pop()
+
+ return self.pretty[name]
+
+ def prettyPrintByName(self, name):
+ """Get pretty printed object prototype from the schema name.
+
+ Args:
+ name: string, Name of schema in the discovery document.
+
+ Returns:
+ string, A string that contains a prototype object with
+ comments that conforms to the given schema.
+ """
+ # Return with trailing comma and newline removed.
+ return self._prettyPrintByName(name, seen=[], dent=0)[:-2]
+
+ @util.positional(2)
+ def _prettyPrintSchema(self, schema, seen=None, dent=0):
+ """Get pretty printed object prototype of schema.
+
+ Args:
+ schema: object, Parsed JSON schema.
+ seen: list of string, Names of schema already seen. Used to handle
+ recursive definitions.
+
+ Returns:
+ string, A string that contains a prototype object with
+ comments that conforms to the given schema.
+ """
+ if seen is None:
+ seen = []
+
+ return _SchemaToStruct(schema, seen, dent=dent).to_str(self._prettyPrintByName)
+
+ def prettyPrintSchema(self, schema):
+ """Get pretty printed object prototype of schema.
+
+ Args:
+ schema: object, Parsed JSON schema.
+
+ Returns:
+ string, A string that contains a prototype object with
+ comments that conforms to the given schema.
+ """
+ # Return with trailing comma and newline removed.
+ return self._prettyPrintSchema(schema, dent=0)[:-2]
+
+ def get(self, name, default=None):
+ """Get deserialized JSON schema from the schema name.
+
+ Args:
+ name: string, Schema name.
+ default: object, return value if name not found.
+ """
+ return self.schemas.get(name, default)
+
+
+class _SchemaToStruct(object):
+ """Convert schema to a prototype object."""
+
+ @util.positional(3)
+ def __init__(self, schema, seen, dent=0):
+ """Constructor.
+
+ Args:
+ schema: object, Parsed JSON schema.
+ seen: list, List of names of schema already seen while parsing. Used to
+ handle recursive definitions.
+ dent: int, Initial indentation depth.
+ """
+ # The result of this parsing is kept as a list of strings.
+ self.value = []
+
+ # The final value of the parsing.
+ self.string = None
+
+ # The parsed JSON schema.
+ self.schema = schema
+
+ # Indentation level.
+ self.dent = dent
+
+ # Method that when called returns a prototype object for the schema with
+ # the given name.
+ self.from_cache = None
+
+ # List of names of schema already seen while parsing.
+ self.seen = seen
+
+ def emit(self, text):
+ """Add text as a line to the output.
+
+ Args:
+ text: string, Text to output.
+ """
+ self.value.extend([" " * self.dent, text, "\n"])
+
+ def emitBegin(self, text):
+ """Add text to the output, but with no line terminator.
+
+ Args:
+ text: string, Text to output.
+ """
+ self.value.extend([" " * self.dent, text])
+
+ def emitEnd(self, text, comment):
+ """Add text and comment to the output with line terminator.
+
+ Args:
+ text: string, Text to output.
+ comment: string, Python comment.
+ """
+ if comment:
+ divider = "\n" + " " * (self.dent + 2) + "# "
+ lines = comment.splitlines()
+ lines = [x.rstrip() for x in lines]
+ comment = divider.join(lines)
+ self.value.extend([text, " # ", comment, "\n"])
+ else:
+ self.value.extend([text, "\n"])
+
+ def indent(self):
+ """Increase indentation level."""
+ self.dent += 1
+
+ def undent(self):
+ """Decrease indentation level."""
+ self.dent -= 1
+
+ def _to_str_impl(self, schema):
+ """Prototype object based on the schema, in Python code with comments.
+
+ Args:
+ schema: object, Parsed JSON schema file.
+
+ Returns:
+ Prototype object based on the schema, in Python code with comments.
+ """
+ stype = schema.get("type")
+ if stype == "object":
+ self.emitEnd("{", schema.get("description", ""))
+ self.indent()
+ if "properties" in schema:
+ properties = schema.get("properties", {})
+ sorted_properties = OrderedDict(sorted(properties.items()))
+ for pname, pschema in sorted_properties.items():
+ self.emitBegin('"%s": ' % pname)
+ self._to_str_impl(pschema)
+ elif "additionalProperties" in schema:
+ self.emitBegin('"a_key": ')
+ self._to_str_impl(schema["additionalProperties"])
+ self.undent()
+ self.emit("},")
+ elif "$ref" in schema:
+ schemaName = schema["$ref"]
+ description = schema.get("description", "")
+ s = self.from_cache(schemaName, seen=self.seen)
+ parts = s.splitlines()
+ self.emitEnd(parts[0], description)
+ for line in parts[1:]:
+ self.emit(line.rstrip())
+ elif stype == "boolean":
+ value = schema.get("default", "True or False")
+ self.emitEnd("%s," % str(value), schema.get("description", ""))
+ elif stype == "string":
+ value = schema.get("default", "A String")
+ self.emitEnd('"%s",' % str(value), schema.get("description", ""))
+ elif stype == "integer":
+ value = schema.get("default", "42")
+ self.emitEnd("%s," % str(value), schema.get("description", ""))
+ elif stype == "number":
+ value = schema.get("default", "3.14")
+ self.emitEnd("%s," % str(value), schema.get("description", ""))
+ elif stype == "null":
+ self.emitEnd("None,", schema.get("description", ""))
+ elif stype == "any":
+ self.emitEnd('"",', schema.get("description", ""))
+ elif stype == "array":
+ self.emitEnd("[", schema.get("description"))
+ self.indent()
+ self.emitBegin("")
+ self._to_str_impl(schema["items"])
+ self.undent()
+ self.emit("],")
+ else:
+ self.emit("Unknown type! %s" % stype)
+ self.emitEnd("", "")
+
+ self.string = "".join(self.value)
+ return self.string
+
+ def to_str(self, from_cache):
+ """Prototype object based on the schema, in Python code with comments.
+
+ Args:
+ from_cache: callable(name, seen), Callable that retrieves an object
+ prototype for a schema with the given name. Seen is a list of schema
+ names already seen as we recursively descend the schema definition.
+
+ Returns:
+ Prototype object based on the schema, in Python code with comments.
+ The lines of the code will all be properly indented.
+ """
+ self.from_cache = from_cache
+ return self._to_str_impl(self.schema)
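An end-to-end sketch of the class above (editorial; the tiny discovery fragment is made up, and the printed indentation is approximate):

```python
import json

from googleapiclient.schema import Schemas

discovery = json.loads("""
{
  "schemas": {
    "Tag": {
      "type": "object",
      "properties": {
        "count": {"type": "integer"},
        "name": {"type": "string", "description": "Display name."}
      }
    }
  }
}
""")

print(Schemas(discovery).prettyPrintByName("Tag"))
# {
#   "count": 42,
#   "name": "A String", # Display name.
# }
```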
diff --git a/Lib/site-packages/googleapiclient/version.py b/Lib/site-packages/googleapiclient/version.py
new file mode 100644
index 0000000..64b7f19
--- /dev/null
+++ b/Lib/site-packages/googleapiclient/version.py
@@ -0,0 +1,15 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "2.117.0"
diff --git a/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/INSTALLER b/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/LICENSE b/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/METADATA b/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/METADATA
new file mode 100644
index 0000000..59dcaf5
--- /dev/null
+++ b/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/METADATA
@@ -0,0 +1,36 @@
+Metadata-Version: 2.1
+Name: googleapis-common-protos
+Version: 1.62.0
+Summary: Common protobufs used in Google APIs
+Home-page: https://github.com/googleapis/python-api-common-protos
+Author: Google LLC
+Author-email: googleapis-packages@google.com
+License: Apache-2.0
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: protobuf !=3.20.0,!=3.20.1,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0.dev0,>=3.19.5
+Provides-Extra: grpc
+Requires-Dist: grpcio <2.0.0.dev0,>=1.44.0 ; extra == 'grpc'
+
+Google APIs common protos
+-------------------------
+
+.. image:: https://img.shields.io/pypi/v/googleapis-common-protos.svg
+ :target: https://pypi.org/project/googleapis-common-protos/
+
+
+googleapis-common-protos contains the python classes generated from the common
+protos in the `googleapis/googleapis <https://github.com/googleapis/googleapis>`_ repository.
diff --git a/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/RECORD b/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/RECORD
new file mode 100644
index 0000000..33ea6b5
--- /dev/null
+++ b/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/RECORD
@@ -0,0 +1,203 @@
+google/api/__pycache__/annotations_pb2.cpython-312.pyc,,
+google/api/__pycache__/auth_pb2.cpython-312.pyc,,
+google/api/__pycache__/backend_pb2.cpython-312.pyc,,
+google/api/__pycache__/billing_pb2.cpython-312.pyc,,
+google/api/__pycache__/client_pb2.cpython-312.pyc,,
+google/api/__pycache__/config_change_pb2.cpython-312.pyc,,
+google/api/__pycache__/consumer_pb2.cpython-312.pyc,,
+google/api/__pycache__/context_pb2.cpython-312.pyc,,
+google/api/__pycache__/control_pb2.cpython-312.pyc,,
+google/api/__pycache__/distribution_pb2.cpython-312.pyc,,
+google/api/__pycache__/documentation_pb2.cpython-312.pyc,,
+google/api/__pycache__/endpoint_pb2.cpython-312.pyc,,
+google/api/__pycache__/error_reason_pb2.cpython-312.pyc,,
+google/api/__pycache__/field_behavior_pb2.cpython-312.pyc,,
+google/api/__pycache__/field_info_pb2.cpython-312.pyc,,
+google/api/__pycache__/http_pb2.cpython-312.pyc,,
+google/api/__pycache__/httpbody_pb2.cpython-312.pyc,,
+google/api/__pycache__/label_pb2.cpython-312.pyc,,
+google/api/__pycache__/launch_stage_pb2.cpython-312.pyc,,
+google/api/__pycache__/log_pb2.cpython-312.pyc,,
+google/api/__pycache__/logging_pb2.cpython-312.pyc,,
+google/api/__pycache__/metric_pb2.cpython-312.pyc,,
+google/api/__pycache__/monitored_resource_pb2.cpython-312.pyc,,
+google/api/__pycache__/monitoring_pb2.cpython-312.pyc,,
+google/api/__pycache__/policy_pb2.cpython-312.pyc,,
+google/api/__pycache__/quota_pb2.cpython-312.pyc,,
+google/api/__pycache__/resource_pb2.cpython-312.pyc,,
+google/api/__pycache__/routing_pb2.cpython-312.pyc,,
+google/api/__pycache__/service_pb2.cpython-312.pyc,,
+google/api/__pycache__/source_info_pb2.cpython-312.pyc,,
+google/api/__pycache__/system_parameter_pb2.cpython-312.pyc,,
+google/api/__pycache__/usage_pb2.cpython-312.pyc,,
+google/api/__pycache__/visibility_pb2.cpython-312.pyc,,
+google/api/annotations.proto,sha256=09O0u-gMBIHDVQc3b1ZT_P38vcfxaKqwl_Ja5kO36-g,1045
+google/api/annotations_pb2.py,sha256=YF9EPjyDHsuST4Wc-6PKGTU8Me99WEvq4iJqhYxpfkA,2133
+google/api/auth.proto,sha256=u8PdMmE2SGz-KxYJegFwtyjEpuSeVQW-jkWEXJAbQjg,9257
+google/api/auth_pb2.py,sha256=wtl4w33zZZ1CjhsEAJapVolMOdRFP2iOdwLT0L9VfP0,5613
+google/api/backend.proto,sha256=qvxyM1fK4tqaP3uWBCPohZhUnzLmywuwOxdcdDxPTdw,7014
+google/api/backend_pb2.py,sha256=-tzYI1CRFlwbU6VPRaRER5_3cqI0umzmkJA_UUJD4Is,4838
+google/api/billing.proto,sha256=xZ0G1HmUepQukR_Y7ZiUJwb-dFnFOOGiIJb-PbCrd8c,3062
+google/api/billing_pb2.py,sha256=jgYLuA4FLUXJRF-Y0HeRDTsbQrkljz_KarHP9TWCgs0,2933
+google/api/client.proto,sha256=JyLbFe4RvIw_iF7BLqKqYbmqA55t9uLwLQvz2C6SJAs,13791
+google/api/client_pb2.py,sha256=O4EpTM6ae1A27y3Iai2HzNGoYww7snRlS2ZTgcDrg1U,16921
+google/api/config_change.proto,sha256=kuiMcrr5XvH3HFN31V9MhIfdTeChNTnTtETlg-s7N7k,3166
+google/api/config_change_pb2.py,sha256=cUbDOjd9uwousOYM9p7qA4Q7y68aLnKFjF1sSu6bOIM,3374
+google/api/consumer.proto,sha256=nRIVNOKCPv2beOI2AfALSLJfmYlkRe2QriVRBY-lIIQ,2717
+google/api/consumer_pb2.py,sha256=Ov5COY4mlEBH0Hc3Eg6izxrd1iOfBcw78Ue3GG4dZCU,3138
+google/api/context.proto,sha256=BzCCy5wtEl-UTci4sWU0bYTtbp8Dn4zywPHZCZhA70g,3067
+google/api/context_pb2.py,sha256=yP4_kQz8Zm_GnVf8L-DuUvghfnPOKMccabQeB-MWupw,2844
+google/api/control.proto,sha256=ktHfRtYcfxkJ01tkDxHzhCCbFp2pOzhnBAuWFdqwEyg,1436
+google/api/control_pb2.py,sha256=8533F75OpqFwiFn4h_jFWuaB0nIVfYqmI3vrUahX2n4,2300
+google/api/distribution.proto,sha256=60XQt_MNXylP8XZaUVL0pWQ-mNHrwDL20DPX1jqSEDM,8660
+google/api/distribution_pb2.py,sha256=9dKLzQiLjbUyYxfPEAp0NkKSJcgUugfOkcfTo1-SE2Q,7845
+google/api/documentation.proto,sha256=c_DPU7K6ZS8_XDAFTV3WZwJhb9ztiLPz3paRw_-C4pQ,6940
+google/api/documentation_pb2.py,sha256=Mux0olXLDUgMuZ9KBXtrowzwjEO8SjbV_4d9DGSr5AA,3698
+google/api/endpoint.proto,sha256=hpZTTEOcIQKPt4PdsgSN1BXJXTWK5Bbi77RA21Q3G4M,3028
+google/api/endpoint_pb2.py,sha256=lj5ZV1xx8oczPmqFYSOUGUqPYR9jh0ADBkSBoDQswEc,2380
+google/api/error_reason.proto,sha256=_I96UW4XYP-JhM-QhWf-qlLK4b3XcqAmmRFshrpRSkA,21934
+google/api/error_reason_pb2.py,sha256=PhTuBmOUhPYNK6jxGmxnHFXQFW30UqxQaJH3zJixVho,4191
+google/api/field_behavior.proto,sha256=Y_HGhdkTZTXOmLOHJ90s4VMS_i1J91L2QCFcZbzHAkM,4289
+google/api/field_behavior_pb2.py,sha256=oacenlIHPQdsZqTMiWQTEbB1tUOSGm-EFs1js7_BECE,2869
+google/api/field_info.proto,sha256=y2uIa03ZWuMxcyVyDVdVnu4ML_6Z7xchVQOJ39vFSTc,3106
+google/api/field_info_pb2.py,sha256=rFh5xZxM_zQPob1_MplM_3LO1xrSU5snrF22lD11qlo,2920
+google/api/http.proto,sha256=L_4ZHyCammjjDwSTxm1c4Vvo3qwnbVW81BJ_jMf0U-I,15159
+google/api/http_pb2.py,sha256=koUyg-VEMqSMlv-kJklMyBDN3jxjHjMpJL-VJbm5sGA,3701
+google/api/httpbody.proto,sha256=RPXmE_7DRqAjCzaHAvb_CMCn0y8F1LwsZL9R3xnsQ8k,2693
+google/api/httpbody_pb2.py,sha256=OISoFp1ICdU5VKD0eHiVOqyQEC6XkjcwYpQZLoPYt1c,2344
+google/api/label.proto,sha256=JViuxDB6MwdRL5Gk0O6JDptyXisc43fBItUGf_b4_kE,1389
+google/api/label_pb2.py,sha256=_V5_MSffDWP5ONpU1peWP0H_3N4S7qP54MtJxAfVKp4,2581
+google/api/launch_stage.proto,sha256=zZcQL4rXzkrgSqoFe269y3OyUn6hcw8pIvnYnRsf9GA,3083
+google/api/launch_stage_pb2.py,sha256=dyaJQ0t77VOXH3XO0n_jbyidIFfrFiMzvLkQ7eCEzF8,2277
+google/api/log.proto,sha256=MSWzePwp2r1WUBX9kFRHeKUSBVWlyeJGXEHoDuZFiBI,2043
+google/api/log_pb2.py,sha256=H_YgxiGviHLtv7iqTWICVjjV2Al_Fx7voyRy5JySGL4,2402
+google/api/logging.proto,sha256=yKbh67OdirGJB4B754U5P64CpZhZwgl_dslbKsBE48M,3174
+google/api/logging_pb2.py,sha256=PUtosIJ9S36_E4ZA4czj16mr1rOh_a_7te5LyXWWHoA,3014
+google/api/metric.proto,sha256=emZwaXd7TzFNSd2sXUD_XdpK3tfPA9GzyEeZn6BN-_0,10605
+google/api/metric_pb2.py,sha256=EUk_3BU3a8Fw37apksZ_hlc93ynSUu3XRZV52GgUpVA,6432
+google/api/monitored_resource.proto,sha256=8H5Wz9FP9AR08tUfF6CVqTRHqjevJCmNGoWvhtjm_T0,5921
+google/api/monitored_resource_pb2.py,sha256=yKAPdtiTiEfKYSFIeCQwPWZfdo-RxLNSbuz5qVYFlqY,6324
+google/api/monitoring.proto,sha256=9RAWLxXvsiRrPfH7ZJnLxb3uvnoauDMclCON_PciefA,4457
+google/api/monitoring_pb2.py,sha256=sqB9wfL4Ms1jpjThYz0gUbttNu1bMIegV8JnQM1jtpg,3129
+google/api/policy.proto,sha256=jfFD0EcmCDcPSRR4FYXxqJq7ik2jOhcEfCwQnCSLJyI,3254
+google/api/policy_pb2.py,sha256=cmNXJ_IyvwwXftF_NWjwEycYMZvsyB6hSmZ6Jmvix9k,3608
+google/api/quota.proto,sha256=J2XtnPZgFcvNZEUYxKeoOxmZkk9qc0fgw-RaceiIfE4,7138
+google/api/quota_pb2.py,sha256=WUc5bd9VEtegpSdQFjHjW13G93KQSI6g8INdMDg4N3Q,5303
+google/api/resource.proto,sha256=DZKZOJK1tR4LSrTd6dG2hngbOVCVvwdygy10hJtL9Cw,8744
+google/api/resource_pb2.py,sha256=fkpZbj1fa13oQEWlR2mJyEhwVmE0Z2JPtziWx43p6LQ,4870
+google/api/routing.proto,sha256=ckmS9y9-Bjgo9EqDunvX0NxkgXGVLjo5hgK7mmKX2BY,14929
+google/api/routing_pb2.py,sha256=jMlornQKLJA5QWV2Uvg3zkbsYt4MxERuTAGyzAGvi90,3189
+google/api/service.proto,sha256=q6s7MJZIDsOwIGkp526ieNbyG-_hER2ot3F3YuSklWY,6762
+google/api/service_pb2.py,sha256=pjj3yYNuDOwvayJAGHJHdRBie9FKsS_1V5CLEhie0R4,5979
+google/api/source_info.proto,sha256=Wrh5ykV_Xh8LoacqD-cXb8a_zxcpIPR_r0EaUBWD9lg,1091
+google/api/source_info_pb2.py,sha256=PLZrMXESnQSXpxYUHjiUrVFqTiepW3j4sk4wyxEAU2c,2292
+google/api/system_parameter.proto,sha256=vbN1_GdjvsPRiP6MW_fsDzOEHscA7Mzy2i3GqfAZSiQ,3475
+google/api/system_parameter_pb2.py,sha256=PSwnzUUjh2mzEoM5hWKwp0EIU_3wXmxBl9-qNocrJnA,3583
+google/api/usage.proto,sha256=8kJh9mrTKQ7gNxX_VntFH9ShcB4kDJHmzv7S-2SpMsY,3787
+google/api/usage_pb2.py,sha256=sx_aUDinwllJqMW1bTz61583bnAXNHXrYqZ4GSZTrxk,2791
+google/api/visibility.proto,sha256=PITea3Rt7KkZQgSEgMxC234uUlPFTuxcJsLFsjdlYCM,3799
+google/api/visibility_pb2.py,sha256=LqZr2qeBjnzV8IR9tqNRmXI8Bskxt69xMDFD-Fa4u8c,4956
+google/cloud/__pycache__/extended_operations_pb2.cpython-312.pyc,,
+google/cloud/extended_operations.proto,sha256=YJSiUZyj11GQWoC33lbtUkttnz1--TfvVlpk4g9iu60,6308
+google/cloud/extended_operations_pb2.py,sha256=xKWiAi1sLCfEKPYkYnDiwlA7z7rnlBYUMTnWmKsSJDQ,4032
+google/cloud/location/__pycache__/locations_pb2.cpython-312.pyc,,
+google/cloud/location/locations.proto,sha256=7DPpJRIbH8jMe44Isz-8X6vooGE6AnkIFT0sFE84dhs,3604
+google/cloud/location/locations_pb2.py,sha256=ECp0OFSg2o1UiMfy2lWdpqulf4UEish8W20N0eU8PLQ,6899
+google/gapic/metadata/__pycache__/gapic_metadata_pb2.cpython-312.pyc,,
+google/gapic/metadata/gapic_metadata.proto,sha256=QuhLul-63A_1cdrk57GUm3dtbimtpSSCCbjIt6olym8,3393
+google/gapic/metadata/gapic_metadata_pb2.py,sha256=x-EJeba3Btl9E0K3W5SqRGKgmjDsEA1FLa6CFVKXv7I,8346
+google/logging/type/__pycache__/http_request_pb2.cpython-312.pyc,,
+google/logging/type/__pycache__/log_severity_pb2.cpython-312.pyc,,
+google/logging/type/http_request.proto,sha256=uzaKHqH3enOvfAYlKC2iE0Hn2iA827KsR7T-a9_gUOQ,3601
+google/logging/type/http_request_pb2.py,sha256=Nb-XAlILoKbfpfj1tpJVpIyx0NvFf-2fttombdlJhhc,3176
+google/logging/type/log_severity.proto,sha256=WB3qvy_1RdtZz0tlDPlcYzgyoA40K2n54tySkbcTOLE,2555
+google/logging/type/log_severity_pb2.py,sha256=gVb0R8dOvN6fYTLRnWnZyel-YIMRcBsj0GAFVQDXqTA,2622
+google/longrunning/__pycache__/operations_grpc.cpython-312.pyc,,
+google/longrunning/__pycache__/operations_grpc_pb2.cpython-312.pyc,,
+google/longrunning/__pycache__/operations_pb2.cpython-312.pyc,,
+google/longrunning/__pycache__/operations_pb2_grpc.cpython-312.pyc,,
+google/longrunning/__pycache__/operations_proto.cpython-312.pyc,,
+google/longrunning/__pycache__/operations_proto_pb2.cpython-312.pyc,,
+google/longrunning/operations.proto,sha256=a3F1Vl2rsgO481XBr0PKrRghjWu5ZC5IasKeN7v8TEQ,10513
+google/longrunning/operations_grpc.py,sha256=nulj2Z10WF2ggHn3LMJ4IhUbtAmTbISk6zb7RDKJc3c,797
+google/longrunning/operations_grpc_pb2.py,sha256=eUCKbAgcHJRKT1Dq1iud7y9SacGX1B96g4sX-UJiMfk,914
+google/longrunning/operations_pb2.py,sha256=GEezQBLVw5LvEZYwq_V7zptQJLL9gWs3dUNqHDotHK8,2253
+google/longrunning/operations_pb2_grpc.py,sha256=XCmhWtnahuW6TIfekqggTAChF49ZF7TU6kY3j7Wgqk8,14464
+google/longrunning/operations_proto.py,sha256=ZXPIBp7WWZoZ9wn_Dr5UBi7XDYduodBqMlihm30f6NM,222
+google/longrunning/operations_proto_pb2.py,sha256=j6hATmAEzprWY1GR_VzCOUaTH9FA9AJDWbHGEp9R7EA,10443
+google/rpc/__pycache__/code_pb2.cpython-312.pyc,,
+google/rpc/__pycache__/error_details_pb2.cpython-312.pyc,,
+google/rpc/__pycache__/http_pb2.cpython-312.pyc,,
+google/rpc/__pycache__/status_pb2.cpython-312.pyc,,
+google/rpc/code.proto,sha256=hmoQd4S8If7XkY63aNBVSgpzzoKz_k-uvf3BvONgl3M,7138
+google/rpc/code_pb2.py,sha256=vvDGSq77csBfVJRLj0BUfvM5ZFiVozaFe6DKR1996z0,2707
+google/rpc/context/__pycache__/attribute_context_pb2.cpython-312.pyc,,
+google/rpc/context/__pycache__/audit_context_pb2.cpython-312.pyc,,
+google/rpc/context/attribute_context.proto,sha256=5tSRA8ggvbpnOn2jhyCxklUaHqQOeobupD7dUCoGyVo,14852
+google/rpc/context/attribute_context_pb2.py,sha256=e0PIy8ksorphcJR9bGw4ai43KPZxCr-ofa2fODBtPjY,14660
+google/rpc/context/audit_context.proto,sha256=R80PkNeRDMRJ-D9hvgi8SxYA_S0EVoHrbBbaXkYAI90,1861
+google/rpc/context/audit_context_pb2.py,sha256=4vbYr_XFfwNHqpo9c7fzvtHFHiDvq1KihMwKlKz8rvU,2603
+google/rpc/error_details.proto,sha256=j5dPUn_1MBQ_FNTwbpewLDFFKXWGnd6EPjHMJPj50Ns,10869
+google/rpc/error_details_pb2.py,sha256=0kiN6fc94jQvsaeMy3s7S-b3EhsM46yXY2WmN6TA78s,11094
+google/rpc/http.proto,sha256=PKlTG0AmdqGUfGi3shoTlm-DCH2dAl62rSBL2X41uL0,1940
+google/rpc/http_pb2.py,sha256=5bNcOVfY8tlHEp_JFozfQe2xRIaEbDuWc2udr1eqK1g,3404
+google/rpc/status.proto,sha256=0b5oDhU-v4IIDAHEHd9ArG3lreRz_IY5GDu6tbnSR1U,1934
+google/rpc/status_pb2.py,sha256=SGM-WxVgLbnqxCc1JQGmPJKvOpVOVhfiWSg-KrH8Ujs,2302
+google/type/__pycache__/calendar_period_pb2.cpython-312.pyc,,
+google/type/__pycache__/color_pb2.cpython-312.pyc,,
+google/type/__pycache__/date_pb2.cpython-312.pyc,,
+google/type/__pycache__/datetime_pb2.cpython-312.pyc,,
+google/type/__pycache__/dayofweek_pb2.cpython-312.pyc,,
+google/type/__pycache__/decimal_pb2.cpython-312.pyc,,
+google/type/__pycache__/expr_pb2.cpython-312.pyc,,
+google/type/__pycache__/fraction_pb2.cpython-312.pyc,,
+google/type/__pycache__/interval_pb2.cpython-312.pyc,,
+google/type/__pycache__/latlng_pb2.cpython-312.pyc,,
+google/type/__pycache__/localized_text_pb2.cpython-312.pyc,,
+google/type/__pycache__/money_pb2.cpython-312.pyc,,
+google/type/__pycache__/month_pb2.cpython-312.pyc,,
+google/type/__pycache__/phone_number_pb2.cpython-312.pyc,,
+google/type/__pycache__/postal_address_pb2.cpython-312.pyc,,
+google/type/__pycache__/quaternion_pb2.cpython-312.pyc,,
+google/type/__pycache__/timeofday_pb2.cpython-312.pyc,,
+google/type/calendar_period.proto,sha256=Uv20O4lquXuT9Au_bJKJ94UHgD3V9XIZURjtgVIUdUE,1762
+google/type/calendar_period_pb2.py,sha256=hZCpKJsaRPCWGp1vNC5XI2B0IUjiHnoBGeFHI7EwCP4,2343
+google/type/color.proto,sha256=kK9GArFcQ4lj6mkLueoapkOSUprR3lNFpng43YNrUI0,6376
+google/type/color_pb2.py,sha256=IUMmF1mMTBbj6iZtGH0Jocx-duS_T-dSbFIoeUKjiqo,2343
+google/type/date.proto,sha256=BuBfGZRa3GepdFMQ_1cRhpdyio8qJ8BtYkkQdvAOIws,1955
+google/type/date_pb2.py,sha256=hpA97snLnWGhirx7dt7tq3J3MOl4vU909-K0QnfTIVE,2138
+google/type/datetime.proto,sha256=iRLRmPPSRZ5b86o6yjiIsK810RXGysWwSaF08CXtAlU,3905
+google/type/datetime_pb2.py,sha256=PuOi4a1AmmOFWijE417NB1Wh5zJxQKFyz3rLQs5LjrU,3165
+google/type/dayofweek.proto,sha256=56YBXUUcCxXQmPWNg76G3OII-bPLtAjCmvCTkgs6JwY,1204
+google/type/dayofweek_pb2.py,sha256=izQmV3nNwX5BH_1gHTD43hQYZdm1HhqKSFeFevsUH8s,2278
+google/type/decimal.proto,sha256=MHwhtJvxLOmTnmlFr7zjUzUMv-GtKWjt8KjI7h8ocSc,4213
+google/type/decimal_pb2.py,sha256=o3ik8jpNxmHKIQFTt3jQ0yolx7hg6FHhcZBqj9O6bRw,2132
+google/type/expr.proto,sha256=h01oPawn_bZZuE8tazwwU4A2dHd27gDTyp89xO2NbV0,2730
+google/type/expr_pb2.py,sha256=e45VfWwvvEAeJkcZOnLLd-QiuJadyC9isZh4PuL8lDw,2153
+google/type/fraction.proto,sha256=v_UMj40abInG6gzw386a9poFH3R1qs1NL7LZCWHfmIs,1156
+google/type/fraction_pb2.py,sha256=AXyzZFbgOpNsWApJ9gETgtZH6-gbWG7XxIjL-zSBQZU,2166
+google/type/interval.proto,sha256=aL9e8GY36gd30t-e4cDPOVMoGb4t_zXhtx-3vGPa_So,1667
+google/type/interval_pb2.py,sha256=1YWhrG1aII6vPMZO2C9PBLIkbLugFgmXCAAdqiedAvo,2364
+google/type/latlng.proto,sha256=p2KWOD4xxxfxeajCt-yigHRhH0uBmejeAZv_qwFXTV8,1447
+google/type/latlng_pb2.py,sha256=JjEHmRlkz4mtNwjkH2uJ265GHb2zCGn7-P_l1uv-AE0,2144
+google/type/localized_text.proto,sha256=nSlltBLvrTIYY-mF04BFlrqybqtvnUFS6_DqD6SOgbw,1303
+google/type/localized_text_pb2.py,sha256=1PoTaA6FpVLEMdvUnu9Yy2kYpiH3WKP_IW0whK3aWuc,2270
+google/type/money.proto,sha256=c-E8J98shWtC68tlqUWcS_xZ3Z4JzEjhp2W74GufpAY,1603
+google/type/money_pb2.py,sha256=xhpDpw13jLt_pim5l7r6Jx-QHd1TvRQgj1gSyQHTOzA,2151
+google/type/month.proto,sha256=76TvqQEdADPekbSnpQxBJ1P_xBr8o-BI_qG3ByHJIfI,1479
+google/type/month_pb2.py,sha256=nSmTfRZiOGe2NHr5zeUm3bNiGFEW0RAKk-sw1QtIGt8,2398
+google/type/phone_number.proto,sha256=V_GJma9FFGht1pEkCzMG9IL4nd-KQZQWT574PzsGudI,4744
+google/type/phone_number_pb2.py,sha256=phMqj3ouEDRmYoxNZSC9aBzL0M_Ej_-EU9OujIZGm5c,3046
+google/type/postal_address.proto,sha256=_R41zeaO1PcGqfEU4gHlFzUubKJ2SERzxhnvJxTIID0,6235
+google/type/postal_address_pb2.py,sha256=_akaf6XBB_6o8EsS2Lw1BIeJdwEL8rLvvO8_0NMpcrE,2654
+google/type/quaternion.proto,sha256=KfoTNWnwo2LypdBv8rMPHB5pf_kdcI5QPiLXrucdOpw,3791
+google/type/quaternion_pb2.py,sha256=iO651jhNnTaHqcQQkFmIjh0510o3q2RNF5s4Q5Teeps,2261
+google/type/timeofday.proto,sha256=7zMEkJWeYteVVcz3SpM686GFL66hX-aguaqIKZwJv2M,1667
+google/type/timeofday_pb2.py,sha256=mHFQIkY9h2QWrvHFa2AWc6uzco-nF3LoVCcVnjt8XRQ,2266
+googleapis_common_protos-1.62.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+googleapis_common_protos-1.62.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+googleapis_common_protos-1.62.0.dist-info/METADATA,sha256=1eJiPy1QwDm0kbTuo6wASZF17ha2rdMZVxCrDGM5ms8,1532
+googleapis_common_protos-1.62.0.dist-info/RECORD,,
+googleapis_common_protos-1.62.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+googleapis_common_protos-1.62.0.dist-info/WHEEL,sha256=P2T-6epvtXQ2cBOE_U1K4_noqlJFN3tj15djMgEu4NM,110
+googleapis_common_protos-1.62.0.dist-info/top_level.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7
diff --git a/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/REQUESTED b/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/WHEEL b/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/WHEEL
new file mode 100644
index 0000000..f31e450
--- /dev/null
+++ b/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.3)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/top_level.txt b/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/top_level.txt
new file mode 100644
index 0000000..cb42911
--- /dev/null
+++ b/Lib/site-packages/googleapis_common_protos-1.62.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+google
diff --git a/Lib/site-packages/httplib2-0.22.0.dist-info/INSTALLER b/Lib/site-packages/httplib2-0.22.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/httplib2-0.22.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/httplib2-0.22.0.dist-info/LICENSE b/Lib/site-packages/httplib2-0.22.0.dist-info/LICENSE
new file mode 100644
index 0000000..ae38286
--- /dev/null
+++ b/Lib/site-packages/httplib2-0.22.0.dist-info/LICENSE
@@ -0,0 +1,23 @@
+Httplib2 Software License
+
+Copyright (c) 2006 by Joe Gregorio
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of the Software,
+and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/Lib/site-packages/httplib2-0.22.0.dist-info/METADATA b/Lib/site-packages/httplib2-0.22.0.dist-info/METADATA
new file mode 100644
index 0000000..933bfa0
--- /dev/null
+++ b/Lib/site-packages/httplib2-0.22.0.dist-info/METADATA
@@ -0,0 +1,75 @@
+Metadata-Version: 2.1
+Name: httplib2
+Version: 0.22.0
+Summary: A comprehensive HTTP client library.
+Home-page: https://github.com/httplib2/httplib2
+Author: Joe Gregorio
+Author-email: joe@bitworking.org
+License: MIT
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+License-File: LICENSE
+Requires-Dist: pyparsing (<3,>=2.4.2) ; python_version < "3.0"
+Requires-Dist: pyparsing (!=3.0.0,!=3.0.1,!=3.0.2,!=3.0.3,<4,>=2.4.2) ; python_version > "3.0"
+
+
+
+A comprehensive HTTP client library, ``httplib2`` supports many features left out of other HTTP libraries.
+
+**HTTP and HTTPS**
+ HTTPS support is only available if the socket module was compiled with SSL support.
+
+
+**Keep-Alive**
+ Supports HTTP 1.1 Keep-Alive, keeping the socket open and performing multiple requests over the same connection if possible.
+
+
+**Authentication**
+ The following three types of HTTP Authentication are supported. These can be used over both HTTP and HTTPS.
+
+ * Digest
+ * Basic
+ * WSSE
+
+**Caching**
+ The module can optionally operate with a private cache that understands the Cache-Control:
+ header and uses both the ETag and Last-Modified cache validators. Both file system
+ and memcached based caches are supported.
+
+
+**All Methods**
+ The module can handle any HTTP request method, not just GET and POST.
+
+
+**Redirects**
+ Automatically follows 3XX redirects on GETs.
+
+
+**Compression**
+ Handles both 'deflate' and 'gzip' types of compression.
+
+
+**Lost update support**
+ Automatically adds back ETags into PUT requests to resources we have already cached. This implements Section 3.2 of "Detecting the Lost Update Problem Using Unreserved Checkout".
+
+
+**Unit Tested**
+ A large and growing set of unit tests.
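As a concrete companion to the feature list above (an editorial sketch, not part of the package metadata), basic httplib2 usage with the file-system cache looks like:

```python
import httplib2

# Passing a directory name enables the file-system cache described above;
# ETag/Last-Modified validators are then applied on repeat requests.
h = httplib2.Http(".cache")

response, content = h.request("https://example.org/", "GET")
print(response.status, response.fromcache, len(content))
```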
diff --git a/Lib/site-packages/httplib2-0.22.0.dist-info/RECORD b/Lib/site-packages/httplib2-0.22.0.dist-info/RECORD
new file mode 100644
index 0000000..f999a0f
--- /dev/null
+++ b/Lib/site-packages/httplib2-0.22.0.dist-info/RECORD
@@ -0,0 +1,20 @@
+httplib2-0.22.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+httplib2-0.22.0.dist-info/LICENSE,sha256=WJ7sOPct8r4gNxHTuMvs6bkIxef_ALw8q39juunjZrQ,1086
+httplib2-0.22.0.dist-info/METADATA,sha256=KKy58CVIaYnc6oBjD0upeaA1dgTbB6z7JK5I6FmDSEM,2618
+httplib2-0.22.0.dist-info/RECORD,,
+httplib2-0.22.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+httplib2-0.22.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+httplib2-0.22.0.dist-info/top_level.txt,sha256=BEY8ChKwagUWmu9x8yN9JObJpZKNeWCr1E-sIECb56I,9
+httplib2/__init__.py,sha256=UOzaxGwGweHiLsxKBc39_Ez0N8aDHwAu--TkTbYLWCw,69396
+httplib2/__pycache__/__init__.cpython-312.pyc,,
+httplib2/__pycache__/auth.cpython-312.pyc,,
+httplib2/__pycache__/certs.cpython-312.pyc,,
+httplib2/__pycache__/error.cpython-312.pyc,,
+httplib2/__pycache__/iri2uri.cpython-312.pyc,,
+httplib2/__pycache__/socks.cpython-312.pyc,,
+httplib2/auth.py,sha256=Fcb7KqrqRCpUaGD-5l84nT5F2aU6ore6ujWLk5idK0o,2158
+httplib2/cacerts.txt,sha256=AbmYP54iGeKRQ1APtfQvHlo9wul2jVmznmbTzy2fTV4,137365
+httplib2/certs.py,sha256=guhfjMNhDdKJEyYBb5ZyLxVO5q1I7Y_P-4BG8MniBk8,971
+httplib2/error.py,sha256=GyqPUvZeKdVLq0f3xg0uX4rjtv7jVGJuPerAdyc-jfk,954
+httplib2/iri2uri.py,sha256=PhIzEzeR6C73l7piwrNAJlVvlWgsqxtJTlFeXgznzQo,4153
+httplib2/socks.py,sha256=oaeEOnT2rkTNm6wnn0CSdhWzVaVshnnkAKiP4kxKzzc,19701
diff --git a/Lib/site-packages/httplib2-0.22.0.dist-info/REQUESTED b/Lib/site-packages/httplib2-0.22.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/httplib2-0.22.0.dist-info/WHEEL b/Lib/site-packages/httplib2-0.22.0.dist-info/WHEEL
new file mode 100644
index 0000000..1f37c02
--- /dev/null
+++ b/Lib/site-packages/httplib2-0.22.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.40.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/Lib/site-packages/httplib2-0.22.0.dist-info/top_level.txt b/Lib/site-packages/httplib2-0.22.0.dist-info/top_level.txt
new file mode 100644
index 0000000..fb881ec
--- /dev/null
+++ b/Lib/site-packages/httplib2-0.22.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+httplib2
diff --git a/Lib/site-packages/httplib2/__init__.py b/Lib/site-packages/httplib2/__init__.py
new file mode 100644
index 0000000..723a63c
--- /dev/null
+++ b/Lib/site-packages/httplib2/__init__.py
@@ -0,0 +1,1799 @@
+# -*- coding: utf-8 -*-
+"""Small, fast HTTP client library for Python."""
+
+__author__ = "Joe Gregorio (joe@bitworking.org)"
+__copyright__ = "Copyright 2006, Joe Gregorio"
+__contributors__ = [
+ "Thomas Broyer (t.broyer@ltgt.net)",
+ "James Antill",
+ "Xavier Verges Farrero",
+ "Jonathan Feinberg",
+ "Blair Zajac",
+ "Sam Ruby",
+ "Louis Nyffenegger",
+ "Mark Pilgrim",
+ "Alex Yu",
+ "Lai Han",
+]
+__license__ = "MIT"
+__version__ = "0.22.0"
+
+import base64
+import calendar
+import copy
+import email
+import email.feedparser
+from email import header
+import email.message
+import email.utils
+import errno
+from gettext import gettext as _
+import gzip
+from hashlib import md5 as _md5
+from hashlib import sha1 as _sha
+import hmac
+import http.client
+import io
+import os
+import random
+import re
+import socket
+import ssl
+import sys
+import time
+import urllib.parse
+import zlib
+
+try:
+ import socks
+except ImportError:
+ # TODO: remove this fallback and copypasted socksipy module upon py2/3 merge,
+ # idea is to have soft-dependency on any compatible module called socks
+ from . import socks
+from . import auth
+from .error import *
+from .iri2uri import iri2uri
+
+
+def has_timeout(timeout):
+ if hasattr(socket, "_GLOBAL_DEFAULT_TIMEOUT"):
+ return timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT
+ return timeout is not None
+
+
+__all__ = [
+ "debuglevel",
+ "FailedToDecompressContent",
+ "Http",
+ "HttpLib2Error",
+ "ProxyInfo",
+ "RedirectLimit",
+ "RedirectMissingLocation",
+ "Response",
+ "RETRIES",
+ "UnimplementedDigestAuthOptionError",
+ "UnimplementedHmacDigestAuthOptionError",
+]
+
+# The httplib debug level, set to a non-zero value to get debug output
+debuglevel = 0
+
+# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
+RETRIES = 2
+
+
+# Open Items:
+# -----------
+
+# Are we removing the cached content too soon on PUT (only delete on 200, maybe?)
+
+# Pluggable cache storage (supports storing the cache in
+# flat files by default. We need a plug-in architecture
+# that can support Berkeley DB and Squid)
+
+# == Known Issues ==
+# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
+# Does not handle Cache-Control: max-stale
+# Does not use Age: headers when calculating cache freshness.
+
+# The number of redirections to follow before giving up.
+# Note that only GET redirects are automatically followed.
+# Will also honor 301 responses by saving that info and never
+# requesting that URI again.
+DEFAULT_MAX_REDIRECTS = 5
+
+# Which headers are hop-by-hop headers by default
+HOP_BY_HOP = [
+ "connection",
+ "keep-alive",
+ "proxy-authenticate",
+ "proxy-authorization",
+ "te",
+ "trailers",
+ "transfer-encoding",
+ "upgrade",
+]
+
+# https://tools.ietf.org/html/rfc7231#section-8.1.3
+SAFE_METHODS = ("GET", "HEAD", "OPTIONS", "TRACE")
+
+# To change, assign to `Http().redirect_codes`
+REDIRECT_CODES = frozenset((300, 301, 302, 303, 307, 308))
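+# For example (illustrative), to stop treating 308 as a followable redirect:
+#   h = Http()
+#   h.redirect_codes = h.redirect_codes - {308}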
+
+
+from httplib2 import certs
+
+CA_CERTS = certs.where()
+
+# PROTOCOL_TLS is python 3.5.3+. PROTOCOL_SSLv23 is deprecated.
+# Both PROTOCOL_TLS and PROTOCOL_SSLv23 are equivalent and mean:
+# > Selects the highest protocol version that both the client and server support.
+# > Despite the name, this option can select “TLS” protocols as well as “SSL”.
+# source: https://docs.python.org/3.5/library/ssl.html#ssl.PROTOCOL_SSLv23
+
+# PROTOCOL_TLS_CLIENT is python 3.10.0+. PROTOCOL_TLS is deprecated.
+# > Auto-negotiate the highest protocol version that both the client and server support, and configure the context client-side connections.
+# > The protocol enables CERT_REQUIRED and check_hostname by default.
+# source: https://docs.python.org/3.10/library/ssl.html#ssl.PROTOCOL_TLS
+
+DEFAULT_TLS_VERSION = getattr(ssl, "PROTOCOL_TLS_CLIENT", None) or getattr(ssl, "PROTOCOL_TLS", None) or getattr(ssl, "PROTOCOL_SSLv23")
+
+
+def _build_ssl_context(
+ disable_ssl_certificate_validation,
+ ca_certs,
+ cert_file=None,
+ key_file=None,
+ maximum_version=None,
+ minimum_version=None,
+ key_password=None,
+):
+ if not hasattr(ssl, "SSLContext"):
+ raise RuntimeError("httplib2 requires Python 3.2+ for ssl.SSLContext")
+
+ context = ssl.SSLContext(DEFAULT_TLS_VERSION)
+ # check_hostname and verify_mode should be set in opposite order during disable
+ # https://bugs.python.org/issue31431
+ if disable_ssl_certificate_validation and hasattr(context, "check_hostname"):
+ context.check_hostname = not disable_ssl_certificate_validation
+ context.verify_mode = ssl.CERT_NONE if disable_ssl_certificate_validation else ssl.CERT_REQUIRED
+
+ # SSLContext.maximum_version and SSLContext.minimum_version are python 3.7+.
+ # source: https://docs.python.org/3/library/ssl.html#ssl.SSLContext.maximum_version
+ if maximum_version is not None:
+ if hasattr(context, "maximum_version"):
+ if isinstance(maximum_version, str):
+ maximum_version = getattr(ssl.TLSVersion, maximum_version)
+ context.maximum_version = maximum_version
+ else:
+ raise RuntimeError("setting tls_maximum_version requires Python 3.7 and OpenSSL 1.1 or newer")
+ if minimum_version is not None:
+ if hasattr(context, "minimum_version"):
+ if isinstance(minimum_version, str):
+ minimum_version = getattr(ssl.TLSVersion, minimum_version)
+ context.minimum_version = minimum_version
+ else:
+ raise RuntimeError("setting tls_minimum_version requires Python 3.7 and OpenSSL 1.1 or newer")
+ # check_hostname requires python 3.4+
+ # we will perform the equivalent in HTTPSConnectionWithTimeout.connect() by calling ssl.match_hostname
+ # if check_hostname is not supported.
+ if hasattr(context, "check_hostname"):
+ context.check_hostname = not disable_ssl_certificate_validation
+
+ context.load_verify_locations(ca_certs)
+
+ if cert_file:
+ context.load_cert_chain(cert_file, key_file, key_password)
+
+ return context
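+
+# For example (illustrative), a verifying client context that refuses anything
+# below TLS 1.2:
+#   ctx = _build_ssl_context(False, CA_CERTS, minimum_version="TLSv1_2")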
+
+
+def _get_end2end_headers(response):
+ hopbyhop = list(HOP_BY_HOP)
+ hopbyhop.extend([x.strip() for x in response.get("connection", "").split(",")])
+ return [header for header in list(response.keys()) if header not in hopbyhop]
+
+
+_missing = object()
+
+
+def _errno_from_exception(e):
+ # TODO python 3.11+ cheap try: return e.errno except AttributeError: pass
+ errno = getattr(e, "errno", _missing)
+ if errno is not _missing:
+ return errno
+
+ # socket.error and common wrap in .args
+ args = getattr(e, "args", None)
+ if args:
+ return _errno_from_exception(args[0])
+
+ # pysocks.ProxyError wraps in .socket_err
+ # https://github.com/httplib2/httplib2/pull/202
+ socket_err = getattr(e, "socket_err", None)
+ if socket_err:
+ return _errno_from_exception(socket_err)
+
+ return None
+
+
+URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
+
+
+def parse_uri(uri):
+ """Parses a URI using the regex given in Appendix B of RFC 3986.
+
+ (scheme, authority, path, query, fragment) = parse_uri(uri)
+ """
+ groups = URI.match(uri).groups()
+ return (groups[1], groups[3], groups[4], groups[6], groups[8])
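+
+# For example (illustrative):
+#   parse_uri("http://example.org/a?b=c#d")
+#   returns ("http", "example.org", "/a", "b=c", "d")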
+
+
+def urlnorm(uri):
+ (scheme, authority, path, query, fragment) = parse_uri(uri)
+ if not scheme or not authority:
+ raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
+ authority = authority.lower()
+ scheme = scheme.lower()
+ if not path:
+ path = "/"
+ # Could do syntax based normalization of the URI before
+ # computing the digest. See Section 6.2.2 of Std 66.
+ request_uri = query and "?".join([path, query]) or path
+ defrag_uri = scheme + "://" + authority + request_uri
+ return scheme, authority, request_uri, defrag_uri
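+
+# For example (illustrative): urlnorm("HTTP://Example.ORG/a?b#frag") returns
+# ("http", "example.org", "/a?b", "http://example.org/a?b") - scheme and host
+# are lowercased and the fragment is dropped from the defragmented URI.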
+
+
+# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
+re_url_scheme = re.compile(r"^\w+://")
+re_unsafe = re.compile(r"[^\w\-_.()=!]+", re.ASCII)
+
+
+def safename(filename):
+ """Return a filename suitable for the cache.
+ Strips dangerous and common characters to create a filename we
+ can use to store the cache in.
+ """
+ if isinstance(filename, bytes):
+ filename_bytes = filename
+ filename = filename.decode("utf-8")
+ else:
+ filename_bytes = filename.encode("utf-8")
+ filemd5 = _md5(filename_bytes).hexdigest()
+ filename = re_url_scheme.sub("", filename)
+ filename = re_unsafe.sub("", filename)
+
+ # limit length of filename (vital for Windows)
+ # https://github.com/httplib2/httplib2/pull/74
+    # C:\Users\<username>\AppData\Local\Temp\<safe_filename>,<md5>
+    # 9 chars + max 104 chars + 20 chars + x + 1 + 32 = max 259 chars
+ # Thus max safe filename x = 93 chars. Let it be 90 to make a round sum:
+ filename = filename[:90]
+
+ return ",".join((filename, filemd5))
+
+
+NORMALIZE_SPACE = re.compile(r"(?:\r\n)?[ \t]+")
+
+
+def _normalize_headers(headers):
+ return dict(
+ [
+            (_convert_byte_str(key).lower(), NORMALIZE_SPACE.sub(" ", _convert_byte_str(value)).strip(),)
+ for (key, value) in headers.items()
+ ]
+ )
+
+
+def _convert_byte_str(s):
+ if not isinstance(s, str):
+ return str(s, "utf-8")
+ return s
+
+
+def _parse_cache_control(headers):
+ retval = {}
+ if "cache-control" in headers:
+ parts = headers["cache-control"].split(",")
+ parts_with_args = [
+ tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")
+ ]
+ parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
+ retval = dict(parts_with_args + parts_wo_args)
+ return retval
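+
+# For example (illustrative): {"cache-control": "max-age=3600, no-cache"}
+# parses to {"max-age": "3600", "no-cache": 1}.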
+
+
+# Whether to use a strict mode to parse WWW-Authenticate headers
+# Might lead to bad results in case of ill-formed header value,
+# so disabled by default, falling back to relaxed parsing.
+# Set to true to turn on, useful for testing servers.
+USE_WWW_AUTH_STRICT_PARSING = 0
+
+
+def _entry_disposition(response_headers, request_headers):
+ """Determine freshness from the Date, Expires and Cache-Control headers.
+
+ We don't handle the following:
+
+ 1. Cache-Control: max-stale
+ 2. Age: headers are not used in the calculations.
+
+    Note that this algorithm is simpler than you might think
+    because we are operating as a private (non-shared) cache.
+    This lets us ignore 's-maxage'. We can also ignore
+    'proxy-invalidate' since we aren't a proxy.
+    As a design decision we will never return a stale document as
+    fresh, hence 'max-stale' is not implemented. This also lets us
+    safely ignore 'must-revalidate', since we operate as if every
+    server had sent 'must-revalidate'.
+ Since we are private we get to ignore both 'public' and
+ 'private' parameters. We also ignore 'no-transform' since
+ we don't do any transformations.
+ The 'no-store' parameter is handled at a higher level.
+ So the only Cache-Control parameters we look at are:
+
+ no-cache
+ only-if-cached
+ max-age
+ min-fresh
+ """
+
+ retval = "STALE"
+ cc = _parse_cache_control(request_headers)
+ cc_response = _parse_cache_control(response_headers)
+
+ if "pragma" in request_headers and request_headers["pragma"].lower().find("no-cache") != -1:
+ retval = "TRANSPARENT"
+ if "cache-control" not in request_headers:
+ request_headers["cache-control"] = "no-cache"
+ elif "no-cache" in cc:
+ retval = "TRANSPARENT"
+ elif "no-cache" in cc_response:
+ retval = "STALE"
+ elif "only-if-cached" in cc:
+ retval = "FRESH"
+ elif "date" in response_headers:
+ date = calendar.timegm(email.utils.parsedate_tz(response_headers["date"]))
+ now = time.time()
+ current_age = max(0, now - date)
+ if "max-age" in cc_response:
+ try:
+ freshness_lifetime = int(cc_response["max-age"])
+ except ValueError:
+ freshness_lifetime = 0
+ elif "expires" in response_headers:
+ expires = email.utils.parsedate_tz(response_headers["expires"])
+ if None == expires:
+ freshness_lifetime = 0
+ else:
+ freshness_lifetime = max(0, calendar.timegm(expires) - date)
+ else:
+ freshness_lifetime = 0
+ if "max-age" in cc:
+ try:
+ freshness_lifetime = int(cc["max-age"])
+ except ValueError:
+ freshness_lifetime = 0
+ if "min-fresh" in cc:
+ try:
+ min_fresh = int(cc["min-fresh"])
+ except ValueError:
+ min_fresh = 0
+ current_age += min_fresh
+ if freshness_lifetime > current_age:
+ retval = "FRESH"
+ return retval
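+
+# For example (illustrative): a cached response carrying "cache-control:
+# max-age=300" whose Date header is 60 seconds old is "FRESH"; once its age
+# exceeds 300 seconds it becomes "STALE" and the caller revalidates it with
+# ETag/Last-Modified validators.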
+
+
+def _decompressContent(response, new_content):
+ content = new_content
+ try:
+ encoding = response.get("content-encoding", None)
+ if encoding in ["gzip", "deflate"]:
+ if encoding == "gzip":
+ content = gzip.GzipFile(fileobj=io.BytesIO(new_content)).read()
+ if encoding == "deflate":
+ try:
+ content = zlib.decompress(content, zlib.MAX_WBITS)
+ except (IOError, zlib.error):
+ content = zlib.decompress(content, -zlib.MAX_WBITS)
+ response["content-length"] = str(len(content))
+            # Record the historical presence of the encoding in a way that won't interfere.
+ response["-content-encoding"] = response["content-encoding"]
+ del response["content-encoding"]
+ except (IOError, zlib.error):
+ content = ""
+ raise FailedToDecompressContent(
+ _("Content purported to be compressed with %s but failed to decompress.") % response.get("content-encoding"),
+ response,
+ content,
+ )
+ return content
+
+
+def _bind_write_headers(msg):
+ def _write_headers(self):
+ # Self refers to the Generator object.
+ for h, v in msg.items():
+ print("%s:" % h, end=" ", file=self._fp)
+ if isinstance(v, header.Header):
+ print(v.encode(maxlinelen=self._maxheaderlen), file=self._fp)
+ else:
+ # email.Header got lots of smarts, so use it.
+ headers = header.Header(v, maxlinelen=self._maxheaderlen, charset="utf-8", header_name=h)
+ print(headers.encode(), file=self._fp)
+ # A blank line always separates headers from body.
+ print(file=self._fp)
+
+ return _write_headers
+
+
+def _updateCache(request_headers, response_headers, content, cache, cachekey):
+ if cachekey:
+ cc = _parse_cache_control(request_headers)
+ cc_response = _parse_cache_control(response_headers)
+ if "no-store" in cc or "no-store" in cc_response:
+ cache.delete(cachekey)
+ else:
+ info = email.message.Message()
+ for key, value in response_headers.items():
+ if key not in ["status", "content-encoding", "transfer-encoding"]:
+ info[key] = value
+
+ # Add annotations to the cache to indicate what headers
+ # are variant for this request.
+ vary = response_headers.get("vary", None)
+ if vary:
+ vary_headers = vary.lower().replace(" ", "").split(",")
+ for header in vary_headers:
+ key = "-varied-%s" % header
+ try:
+ info[key] = request_headers[header]
+ except KeyError:
+ pass
+
+ status = response_headers.status
+ if status == 304:
+ status = 200
+
+ status_header = "status: %d\r\n" % status
+
+ try:
+ header_str = info.as_string()
+ except UnicodeEncodeError:
+ setattr(info, "_write_headers", _bind_write_headers(info))
+ header_str = info.as_string()
+
+            header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
+            text = b"".join([status_header.encode("utf-8"), header_str.encode("utf-8"), content])
+
+            cache.set(cachekey, text)
+
+
+class GoogleLoginAuthentication(Authentication):
+    def __init__(self, credentials, host, request_uri, headers, response, content, http):
+        from urllib.parse import urlencode
+
+        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
+        challenge = auth._parse_www_authenticate(response, "www-authenticate")
+        service = challenge["googlelogin"].get("service", "xapi")
+        # Blogger actually returns the service in the challenge;
+        # for the rest we guess based on the URI.
+        if service == "xapi" and request_uri.find("calendar") > 0:
+ service = "cl"
+ # No point in guessing Base or Spreadsheet
+ # elif request_uri.find("spreadsheets") > 0:
+ # service = "wise"
+
+ auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers["user-agent"],)
+ resp, content = self.http.request(
+ "https://www.google.com/accounts/ClientLogin",
+ method="POST",
+ body=urlencode(auth),
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ )
+ lines = content.split("\n")
+ d = dict([tuple(line.split("=", 1)) for line in lines if line])
+ if resp.status == 403:
+ self.Auth = ""
+ else:
+ self.Auth = d["Auth"]
+
+ def request(self, method, request_uri, headers, content):
+ """Modify the request headers to add the appropriate
+ Authorization header."""
+ headers["authorization"] = "GoogleLogin Auth=" + self.Auth
+
+
+AUTH_SCHEME_CLASSES = {
+ "basic": BasicAuthentication,
+ "wsse": WsseAuthentication,
+ "digest": DigestAuthentication,
+ "hmacdigest": HmacDigestAuthentication,
+ "googlelogin": GoogleLoginAuthentication,
+}
+
+AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
+
+
+class FileCache(object):
+ """Uses a local directory as a store for cached files.
+ Not really safe to use if multiple threads or processes are going to
+ be running on the same cache.
+ """
+
+ def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
+ self.cache = cache
+ self.safe = safe
+ if not os.path.exists(cache):
+ os.makedirs(self.cache)
+
+ def get(self, key):
+ retval = None
+ cacheFullPath = os.path.join(self.cache, self.safe(key))
+ try:
+ f = open(cacheFullPath, "rb")
+ retval = f.read()
+ f.close()
+ except IOError:
+ pass
+ return retval
+
+ def set(self, key, value):
+ cacheFullPath = os.path.join(self.cache, self.safe(key))
+ f = open(cacheFullPath, "wb")
+ f.write(value)
+ f.close()
+
+ def delete(self, key):
+ cacheFullPath = os.path.join(self.cache, self.safe(key))
+ if os.path.exists(cacheFullPath):
+ os.remove(cacheFullPath)
+
+
+class Credentials(object):
+ def __init__(self):
+ self.credentials = []
+
+ def add(self, name, password, domain=""):
+ self.credentials.append((domain.lower(), name, password))
+
+ def clear(self):
+ self.credentials = []
+
+ def iter(self, domain):
+ for (cdomain, name, password) in self.credentials:
+ if cdomain == "" or domain == cdomain:
+ yield (name, password)
+
+
+class KeyCerts(Credentials):
+ """Identical to Credentials except that
+ name/password are mapped to key/cert."""
+
+ def add(self, key, cert, domain, password):
+ self.credentials.append((domain.lower(), key, cert, password))
+
+ def iter(self, domain):
+ for (cdomain, key, cert, password) in self.credentials:
+ if cdomain == "" or domain == cdomain:
+ yield (key, cert, password)
+
+
+class AllHosts(object):
+ pass
+
+
+class ProxyInfo(object):
+ """Collect information required to use a proxy."""
+
+ bypass_hosts = ()
+
+ def __init__(
+ self, proxy_type, proxy_host, proxy_port, proxy_rdns=True, proxy_user=None, proxy_pass=None, proxy_headers=None,
+ ):
+ """Args:
+
+ proxy_type: The type of proxy server. This must be set to one of
+ socks.PROXY_TYPE_XXX constants. For example: p =
+ ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost',
+ proxy_port=8000)
+ proxy_host: The hostname or IP address of the proxy server.
+ proxy_port: The port that the proxy server is running on.
+ proxy_rdns: If True (default), DNS queries will not be performed
+ locally, and instead, handed to the proxy to resolve. This is useful
+ if the network does not allow resolution of non-local names. In
+ httplib2 0.9 and earlier, this defaulted to False.
+ proxy_user: The username used to authenticate with the proxy server.
+ proxy_pass: The password used to authenticate with the proxy server.
+ proxy_headers: Additional or modified headers for the proxy connect
+ request.
+ """
+ if isinstance(proxy_user, bytes):
+ proxy_user = proxy_user.decode()
+ if isinstance(proxy_pass, bytes):
+ proxy_pass = proxy_pass.decode()
+ (
+ self.proxy_type,
+ self.proxy_host,
+ self.proxy_port,
+ self.proxy_rdns,
+ self.proxy_user,
+ self.proxy_pass,
+ self.proxy_headers,
+ ) = (
+ proxy_type,
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ )
+
+ def astuple(self):
+ return (
+ self.proxy_type,
+ self.proxy_host,
+ self.proxy_port,
+ self.proxy_rdns,
+ self.proxy_user,
+ self.proxy_pass,
+ self.proxy_headers,
+ )
+
+ def isgood(self):
+ return socks and (self.proxy_host != None) and (self.proxy_port != None)
+
+ def applies_to(self, hostname):
+ return not self.bypass_host(hostname)
+
+ def bypass_host(self, hostname):
+ """Has this host been excluded from the proxy config"""
+ if self.bypass_hosts is AllHosts:
+ return True
+
+ hostname = "." + hostname.lstrip(".")
+ for skip_name in self.bypass_hosts:
+ # *.suffix
+ if skip_name.startswith(".") and hostname.endswith(skip_name):
+ return True
+ # exact match
+ if hostname == "." + skip_name:
+ return True
+ return False
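+
+    # For example (illustrative): with bypass_hosts=(".internal", "localhost"),
+    # bypass_host("api.internal") and bypass_host("localhost") are True, while
+    # bypass_host("example.org") is False.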
+
+ def __repr__(self):
+ return (
+            "<ProxyInfo type={p.proxy_type} "
+            "host:port={p.proxy_host}:{p.proxy_port} rdns={p.proxy_rdns}"
+            " user={p.proxy_user} headers={p.proxy_headers}>"
+ ).format(p=self)
+
+
+def proxy_info_from_environment(method="http"):
+ """Read proxy info from the environment variables.
+ """
+ if method not in ("http", "https"):
+ return
+
+ env_var = method + "_proxy"
+ url = os.environ.get(env_var, os.environ.get(env_var.upper()))
+ if not url:
+ return
+ return proxy_info_from_url(url, method, noproxy=None)
+
+
+def proxy_info_from_url(url, method="http", noproxy=None):
+ """Construct a ProxyInfo from a URL (such as http_proxy env var)
+ """
+ url = urllib.parse.urlparse(url)
+
+ proxy_type = 3 # socks.PROXY_TYPE_HTTP
+ pi = ProxyInfo(
+ proxy_type=proxy_type,
+ proxy_host=url.hostname,
+ proxy_port=url.port or dict(https=443, http=80)[method],
+ proxy_user=url.username or None,
+ proxy_pass=url.password or None,
+ proxy_headers=None,
+ )
+
+ bypass_hosts = []
+ # If not given an explicit noproxy value, respect values in env vars.
+ if noproxy is None:
+ noproxy = os.environ.get("no_proxy", os.environ.get("NO_PROXY", ""))
+ # Special case: A single '*' character means all hosts should be bypassed.
+ if noproxy == "*":
+ bypass_hosts = AllHosts
+ elif noproxy.strip():
+ bypass_hosts = noproxy.split(",")
+ bypass_hosts = tuple(filter(bool, bypass_hosts)) # To exclude empty string.
+
+ pi.bypass_hosts = bypass_hosts
+ return pi
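+
+# For example (illustrative):
+#   pi = proxy_info_from_url("http://user:pass@proxy.example:3128", noproxy="localhost")
+#   pi.bypass_host("localhost")   # True - requests to localhost skip the proxy
+#   pi.bypass_host("example.org") # False - sent through proxy.example:3128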
+
+
+class HTTPConnectionWithTimeout(http.client.HTTPConnection):
+ """HTTPConnection subclass that supports timeouts
+
+ All timeouts are in seconds. If None is passed for timeout then
+ Python's default timeout for sockets will be used. See for example
+ the docs of socket.setdefaulttimeout():
+ http://docs.python.org/library/socket.html#socket.setdefaulttimeout
+ """
+
+ def __init__(self, host, port=None, timeout=None, proxy_info=None):
+ http.client.HTTPConnection.__init__(self, host, port=port, timeout=timeout)
+
+ self.proxy_info = proxy_info
+ if proxy_info and not isinstance(proxy_info, ProxyInfo):
+ self.proxy_info = proxy_info("http")
+
+ def connect(self):
+ """Connect to the host and port specified in __init__."""
+ if self.proxy_info and socks is None:
+ raise ProxiesUnavailableError("Proxy support missing but proxy use was requested!")
+ if self.proxy_info and self.proxy_info.isgood() and self.proxy_info.applies_to(self.host):
+ use_proxy = True
+ (
+ proxy_type,
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ ) = self.proxy_info.astuple()
+
+ host = proxy_host
+ port = proxy_port
+ else:
+ use_proxy = False
+
+ host = self.host
+ port = self.port
+ proxy_type = None
+
+ socket_err = None
+
+ for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
+ af, socktype, proto, canonname, sa = res
+ try:
+ if use_proxy:
+ self.sock = socks.socksocket(af, socktype, proto)
+ self.sock.setproxy(
+ proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass,
+ )
+ else:
+ self.sock = socket.socket(af, socktype, proto)
+ self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ if has_timeout(self.timeout):
+ self.sock.settimeout(self.timeout)
+ if self.debuglevel > 0:
+ print("connect: ({0}, {1}) ************".format(self.host, self.port))
+ if use_proxy:
+ print(
+ "proxy: {0} ************".format(
+ str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,))
+ )
+ )
+
+ self.sock.connect((self.host, self.port) + sa[2:])
+ except socket.error as e:
+ socket_err = e
+ if self.debuglevel > 0:
+ print("connect fail: ({0}, {1})".format(self.host, self.port))
+ if use_proxy:
+ print(
+ "proxy: {0}".format(
+ str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,))
+ )
+ )
+ if self.sock:
+ self.sock.close()
+ self.sock = None
+ continue
+ break
+ if not self.sock:
+ raise socket_err
+
+
+class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
+ """This class allows communication via SSL.
+
+ All timeouts are in seconds. If None is passed for timeout then
+ Python's default timeout for sockets will be used. See for example
+ the docs of socket.setdefaulttimeout():
+ http://docs.python.org/library/socket.html#socket.setdefaulttimeout
+ """
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ timeout=None,
+ proxy_info=None,
+ ca_certs=None,
+ disable_ssl_certificate_validation=False,
+ tls_maximum_version=None,
+ tls_minimum_version=None,
+ key_password=None,
+ ):
+
+ self.disable_ssl_certificate_validation = disable_ssl_certificate_validation
+ self.ca_certs = ca_certs if ca_certs else CA_CERTS
+
+ self.proxy_info = proxy_info
+ if proxy_info and not isinstance(proxy_info, ProxyInfo):
+ self.proxy_info = proxy_info("https")
+
+ context = _build_ssl_context(
+ self.disable_ssl_certificate_validation,
+ self.ca_certs,
+ cert_file,
+ key_file,
+ maximum_version=tls_maximum_version,
+ minimum_version=tls_minimum_version,
+ key_password=key_password,
+ )
+ super(HTTPSConnectionWithTimeout, self).__init__(
+ host, port=port, timeout=timeout, context=context,
+ )
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.key_password = key_password
+
+ def connect(self):
+ """Connect to a host on a given (SSL) port."""
+ if self.proxy_info and self.proxy_info.isgood() and self.proxy_info.applies_to(self.host):
+ use_proxy = True
+ (
+ proxy_type,
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ ) = self.proxy_info.astuple()
+
+ host = proxy_host
+ port = proxy_port
+ else:
+ use_proxy = False
+
+ host = self.host
+ port = self.port
+ proxy_type = None
+ proxy_headers = None
+
+ socket_err = None
+
+ address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
+ for family, socktype, proto, canonname, sockaddr in address_info:
+ try:
+ if use_proxy:
+ sock = socks.socksocket(family, socktype, proto)
+
+ sock.setproxy(
+ proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass,
+ )
+ else:
+ sock = socket.socket(family, socktype, proto)
+ sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ if has_timeout(self.timeout):
+ sock.settimeout(self.timeout)
+ sock.connect((self.host, self.port))
+
+ self.sock = self._context.wrap_socket(sock, server_hostname=self.host)
+
+ # Python 3.3 compatibility: emulate the check_hostname behavior
+ if not hasattr(self._context, "check_hostname") and not self.disable_ssl_certificate_validation:
+ try:
+ ssl.match_hostname(self.sock.getpeercert(), self.host)
+ except Exception:
+ self.sock.shutdown(socket.SHUT_RDWR)
+ self.sock.close()
+ raise
+
+ if self.debuglevel > 0:
+ print("connect: ({0}, {1})".format(self.host, self.port))
+ if use_proxy:
+ print(
+ "proxy: {0}".format(
+ str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,))
+ )
+ )
+ except (ssl.SSLError, ssl.CertificateError) as e:
+ if sock:
+ sock.close()
+ if self.sock:
+ self.sock.close()
+ self.sock = None
+ raise
+ except (socket.timeout, socket.gaierror):
+ raise
+ except socket.error as e:
+ socket_err = e
+ if self.debuglevel > 0:
+ print("connect fail: ({0}, {1})".format(self.host, self.port))
+ if use_proxy:
+ print(
+ "proxy: {0}".format(
+ str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,))
+ )
+ )
+ if self.sock:
+ self.sock.close()
+ self.sock = None
+ continue
+ break
+ if not self.sock:
+ raise socket_err
+
+
+SCHEME_TO_CONNECTION = {
+ "http": HTTPConnectionWithTimeout,
+ "https": HTTPSConnectionWithTimeout,
+}
+
+
+class Http(object):
+ """An HTTP client that handles:
+
+ - all methods
+ - caching
+ - ETags
+ - compression,
+ - HTTPS
+ - Basic
+ - Digest
+ - WSSE
+
+ and more.
+ """
+
+ def __init__(
+ self,
+ cache=None,
+ timeout=None,
+ proxy_info=proxy_info_from_environment,
+ ca_certs=None,
+ disable_ssl_certificate_validation=False,
+ tls_maximum_version=None,
+ tls_minimum_version=None,
+ ):
+ """If 'cache' is a string then it is used as a directory name for
+ a disk cache. Otherwise it must be an object that supports the
+ same interface as FileCache.
+
+ All timeouts are in seconds. If None is passed for timeout
+ then Python's default timeout for sockets will be used. See
+ for example the docs of socket.setdefaulttimeout():
+ http://docs.python.org/library/socket.html#socket.setdefaulttimeout
+
+ `proxy_info` may be:
+ - a callable that takes the http scheme ('http' or 'https') and
+ returns a ProxyInfo instance per request. By default, uses
+ proxy_info_from_environment.
+ - a ProxyInfo instance (static proxy config).
+ - None (proxy disabled).
+
+ ca_certs is the path of a file containing root CA certificates for SSL
+ server certificate validation. By default, a CA cert file bundled with
+ httplib2 is used.
+
+ If disable_ssl_certificate_validation is true, SSL cert validation will
+ not be performed.
+
+ tls_maximum_version / tls_minimum_version require Python 3.7+ /
+ OpenSSL 1.1.0g+. A value of "TLSv1_3" requires OpenSSL 1.1.1+.
+ """
+ self.proxy_info = proxy_info
+ self.ca_certs = ca_certs
+ self.disable_ssl_certificate_validation = disable_ssl_certificate_validation
+ self.tls_maximum_version = tls_maximum_version
+ self.tls_minimum_version = tls_minimum_version
+ # Map domain name to an httplib connection
+ self.connections = {}
+ # The location of the cache, for now a directory
+ # where cached responses are held.
+ if cache and isinstance(cache, str):
+ self.cache = FileCache(cache)
+ else:
+ self.cache = cache
+
+ # Name/password
+ self.credentials = Credentials()
+
+ # Key/cert
+ self.certificates = KeyCerts()
+
+ # authorization objects
+ self.authorizations = []
+
+ # If set to False then no redirects are followed, even safe ones.
+ self.follow_redirects = True
+
+ self.redirect_codes = REDIRECT_CODES
+
+ # Which HTTP methods do we apply optimistic concurrency to, i.e.
+ # which methods get an "if-match:" etag header added to them.
+ self.optimistic_concurrency_methods = ["PUT", "PATCH"]
+
+ self.safe_methods = list(SAFE_METHODS)
+
+ # If 'follow_redirects' is True, and this is set to True then
+        # all redirects are followed, including unsafe ones.
+ self.follow_all_redirects = False
+
+ self.ignore_etag = False
+
+ self.force_exception_to_status_code = False
+
+ self.timeout = timeout
+
+ # Keep Authorization: headers on a redirect.
+ self.forward_authorization_headers = False
+
+ def close(self):
+ """Close persistent connections, clear sensitive data.
+ Not thread-safe, requires external synchronization against concurrent requests.
+ """
+ existing, self.connections = self.connections, {}
+ for _, c in existing.items():
+ c.close()
+ self.certificates.clear()
+ self.clear_credentials()
+
+ def __getstate__(self):
+ state_dict = copy.copy(self.__dict__)
+ # In case request is augmented by some foreign object such as
+ # credentials which handle auth
+ if "request" in state_dict:
+ del state_dict["request"]
+ if "connections" in state_dict:
+ del state_dict["connections"]
+ return state_dict
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+ self.connections = {}
+
+ def _auth_from_challenge(self, host, request_uri, headers, response, content):
+ """A generator that creates Authorization objects
+ that can be applied to requests.
+ """
+ challenges = auth._parse_www_authenticate(response, "www-authenticate")
+ for cred in self.credentials.iter(host):
+ for scheme in AUTH_SCHEME_ORDER:
+ if scheme in challenges:
+ yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
+
+ def add_credentials(self, name, password, domain=""):
+ """Add a name and password that will be used
+ any time a request requires authentication."""
+ self.credentials.add(name, password, domain)
+
+ def add_certificate(self, key, cert, domain, password=None):
+ """Add a key and cert that will be used
+ any time a request requires authentication."""
+ self.certificates.add(key, cert, domain, password)
+
+ def clear_credentials(self):
+ """Remove all the names and passwords
+ that are used for authentication"""
+ self.credentials.clear()
+ self.authorizations = []
+
+ def _conn_request(self, conn, request_uri, method, body, headers):
+ i = 0
+ seen_bad_status_line = False
+ while i < RETRIES:
+ i += 1
+ try:
+ if conn.sock is None:
+ conn.connect()
+ conn.request(method, request_uri, body, headers)
+ except socket.timeout:
+ conn.close()
+ raise
+ except socket.gaierror:
+ conn.close()
+ raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
+ except socket.error as e:
+ errno_ = _errno_from_exception(e)
+ if errno_ in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES:
+ continue # retry on potentially transient errors
+ raise
+ except http.client.HTTPException:
+ if conn.sock is None:
+ if i < RETRIES - 1:
+ conn.close()
+ conn.connect()
+ continue
+ else:
+ conn.close()
+ raise
+ if i < RETRIES - 1:
+ conn.close()
+ conn.connect()
+ continue
+ # Just because the server closed the connection doesn't apparently mean
+ # that the server didn't send a response.
+ pass
+ try:
+ response = conn.getresponse()
+ except (http.client.BadStatusLine, http.client.ResponseNotReady):
+ # If we get a BadStatusLine on the first try then that means
+ # the connection just went stale, so retry regardless of the
+ # number of RETRIES set.
+ if not seen_bad_status_line and i == 1:
+ i = 0
+ seen_bad_status_line = True
+ conn.close()
+ conn.connect()
+ continue
+ else:
+ conn.close()
+ raise
+ except socket.timeout:
+ raise
+ except (socket.error, http.client.HTTPException):
+ conn.close()
+ if i == 0:
+ conn.close()
+ conn.connect()
+ continue
+ else:
+ raise
+ else:
+ content = b""
+ if method == "HEAD":
+ conn.close()
+ else:
+ content = response.read()
+ response = Response(response)
+ if method != "HEAD":
+ content = _decompressContent(response, content)
+
+ break
+ return (response, content)
+
+ def _request(
+ self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey,
+ ):
+ """Do the actual request using the connection object
+ and also follow one level of redirects if necessary"""
+
+ auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
+ auth = auths and sorted(auths)[0][1] or None
+ if auth:
+ auth.request(method, request_uri, headers, body)
+
+ (response, content) = self._conn_request(conn, request_uri, method, body, headers)
+
+ if auth:
+ if auth.response(response, body):
+ auth.request(method, request_uri, headers, body)
+ (response, content) = self._conn_request(conn, request_uri, method, body, headers)
+ response._stale_digest = 1
+
+ if response.status == 401:
+ for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
+ authorization.request(method, request_uri, headers, body)
+ (response, content) = self._conn_request(conn, request_uri, method, body, headers)
+ if response.status != 401:
+ self.authorizations.append(authorization)
+ authorization.response(response, body)
+ break
+
+ if self.follow_all_redirects or method in self.safe_methods or response.status in (303, 308):
+ if self.follow_redirects and response.status in self.redirect_codes:
+ # Pick out the location header and basically start from the beginning
+ # remembering first to strip the ETag header and decrement our 'depth'
+ if redirections:
+ if "location" not in response and response.status != 300:
+ raise RedirectMissingLocation(
+ _("Redirected but the response is missing a Location: header."), response, content,
+ )
+ # Fix-up relative redirects (which violate an RFC 2616 MUST)
+ if "location" in response:
+ location = response["location"]
+ (scheme, authority, path, query, fragment) = parse_uri(location)
+ if authority == None:
+ response["location"] = urllib.parse.urljoin(absolute_uri, location)
+ if response.status == 308 or (response.status == 301 and (method in self.safe_methods)):
+ response["-x-permanent-redirect-url"] = response["location"]
+ if "content-location" not in response:
+ response["content-location"] = absolute_uri
+ _updateCache(headers, response, content, self.cache, cachekey)
+ if "if-none-match" in headers:
+ del headers["if-none-match"]
+ if "if-modified-since" in headers:
+ del headers["if-modified-since"]
+ if "authorization" in headers and not self.forward_authorization_headers:
+ del headers["authorization"]
+ if "location" in response:
+ location = response["location"]
+ old_response = copy.deepcopy(response)
+ if "content-location" not in old_response:
+ old_response["content-location"] = absolute_uri
+ redirect_method = method
+ if response.status in [302, 303]:
+ redirect_method = "GET"
+ body = None
+ (response, content) = self.request(
+ location, method=redirect_method, body=body, headers=headers, redirections=redirections - 1,
+ )
+ response.previous = old_response
+ else:
+ raise RedirectLimit(
+ "Redirected more times than redirection_limit allows.", response, content,
+ )
+ elif response.status in [200, 203] and method in self.safe_methods:
+ # Don't cache 206's since we aren't going to handle byte range requests
+ if "content-location" not in response:
+ response["content-location"] = absolute_uri
+ _updateCache(headers, response, content, self.cache, cachekey)
+
+ return (response, content)
+
+ def _normalize_headers(self, headers):
+ return _normalize_headers(headers)
+
+ # Need to catch and rebrand some exceptions
+ # Then need to optionally turn all exceptions into status codes
+ # including all socket.* and httplib.* exceptions.
+
+ def request(
+ self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None,
+ ):
+ """ Performs a single HTTP request.
+The 'uri' is the URI of the HTTP resource and can begin
+with either 'http' or 'https'. The value of 'uri' must be an absolute URI.
+
+The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc.
+There is no restriction on the methods allowed.
+
+The 'body' is the entity body to be sent with the request. It is a string
+object.
+
+Any extra headers that are to be sent with the request should be provided in the
+'headers' dictionary.
+
+The maximum number of redirects to follow before raising an
+exception is 'redirections'. The default is 5.
+
+The return value is a tuple of (response, content), the first
+being an instance of the 'Response' class, the second being
+a string that contains the response entity body.
+ """
+ conn_key = ""
+
+ try:
+ if headers is None:
+ headers = {}
+ else:
+ headers = self._normalize_headers(headers)
+
+ if "user-agent" not in headers:
+ headers["user-agent"] = "Python-httplib2/%s (gzip)" % __version__
+
+ uri = iri2uri(uri)
+ # Prevent CWE-75 space injection to manipulate request via part of uri.
+ # Prevent CWE-93 CRLF injection to modify headers via part of uri.
+ uri = uri.replace(" ", "%20").replace("\r", "%0D").replace("\n", "%0A")
+
+ (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
+
+ conn_key = scheme + ":" + authority
+ conn = self.connections.get(conn_key)
+ if conn is None:
+ if not connection_type:
+ connection_type = SCHEME_TO_CONNECTION[scheme]
+ certs = list(self.certificates.iter(authority))
+ if issubclass(connection_type, HTTPSConnectionWithTimeout):
+ if certs:
+ conn = self.connections[conn_key] = connection_type(
+ authority,
+ key_file=certs[0][0],
+ cert_file=certs[0][1],
+ timeout=self.timeout,
+ proxy_info=self.proxy_info,
+ ca_certs=self.ca_certs,
+ disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,
+ tls_maximum_version=self.tls_maximum_version,
+ tls_minimum_version=self.tls_minimum_version,
+ key_password=certs[0][2],
+ )
+ else:
+ conn = self.connections[conn_key] = connection_type(
+ authority,
+ timeout=self.timeout,
+ proxy_info=self.proxy_info,
+ ca_certs=self.ca_certs,
+ disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,
+ tls_maximum_version=self.tls_maximum_version,
+ tls_minimum_version=self.tls_minimum_version,
+ )
+ else:
+ conn = self.connections[conn_key] = connection_type(
+ authority, timeout=self.timeout, proxy_info=self.proxy_info
+ )
+ conn.set_debuglevel(debuglevel)
+
+ if "range" not in headers and "accept-encoding" not in headers:
+ headers["accept-encoding"] = "gzip, deflate"
+
+ info = email.message.Message()
+ cachekey = None
+ cached_value = None
+ if self.cache:
+ cachekey = defrag_uri
+ cached_value = self.cache.get(cachekey)
+ if cached_value:
+ try:
+ info, content = cached_value.split(b"\r\n\r\n", 1)
+ info = email.message_from_bytes(info)
+ for k, v in info.items():
+ if v.startswith("=?") and v.endswith("?="):
+ info.replace_header(k, str(*email.header.decode_header(v)[0]))
+ except (IndexError, ValueError):
+ self.cache.delete(cachekey)
+ cachekey = None
+ cached_value = None
+
+ if (
+ method in self.optimistic_concurrency_methods
+ and self.cache
+ and "etag" in info
+ and not self.ignore_etag
+ and "if-match" not in headers
+ ):
+ # http://www.w3.org/1999/04/Editing/
+ headers["if-match"] = info["etag"]
+
+ # https://tools.ietf.org/html/rfc7234
+ # A cache MUST invalidate the effective Request URI as well as [...] Location and Content-Location
+ # when a non-error status code is received in response to an unsafe request method.
+ if self.cache and cachekey and method not in self.safe_methods:
+ self.cache.delete(cachekey)
+
+ # Check the vary header in the cache to see if this request
+ # matches what varies in the cache.
+ if method in self.safe_methods and "vary" in info:
+ vary = info["vary"]
+ vary_headers = vary.lower().replace(" ", "").split(",")
+ for header in vary_headers:
+ key = "-varied-%s" % header
+ value = info[key]
+ if headers.get(header, None) != value:
+ cached_value = None
+ break
+
+ if (
+ self.cache
+ and cached_value
+ and (method in self.safe_methods or info["status"] == "308")
+ and "range" not in headers
+ ):
+ redirect_method = method
+ if info["status"] not in ("307", "308"):
+ redirect_method = "GET"
+ if "-x-permanent-redirect-url" in info:
+ # Should cached permanent redirects be counted in our redirection count? For now, yes.
+ if redirections <= 0:
+ raise RedirectLimit(
+ "Redirected more times than redirection_limit allows.", {}, "",
+ )
+ (response, new_content) = self.request(
+ info["-x-permanent-redirect-url"],
+ method=redirect_method,
+ headers=headers,
+ redirections=redirections - 1,
+ )
+ response.previous = Response(info)
+ response.previous.fromcache = True
+ else:
+ # Determine our course of action:
+ # Is the cached entry fresh or stale?
+ # Has the client requested a non-cached response?
+ #
+ # There seems to be three possible answers:
+ # 1. [FRESH] Return the cache entry w/o doing a GET
+ # 2. [STALE] Do the GET (but add in cache validators if available)
+ # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
+ entry_disposition = _entry_disposition(info, headers)
+
+ if entry_disposition == "FRESH":
+ response = Response(info)
+ response.fromcache = True
+ return (response, content)
+
+ if entry_disposition == "STALE":
+ if "etag" in info and not self.ignore_etag and not "if-none-match" in headers:
+ headers["if-none-match"] = info["etag"]
+ if "last-modified" in info and not "last-modified" in headers:
+ headers["if-modified-since"] = info["last-modified"]
+ elif entry_disposition == "TRANSPARENT":
+ pass
+
+ (response, new_content) = self._request(
+ conn, authority, uri, request_uri, method, body, headers, redirections, cachekey,
+ )
+
+ if response.status == 304 and method == "GET":
+ # Rewrite the cache entry with the new end-to-end headers
+ # Take all headers that are in response
+ # and overwrite their values in info.
+ # unless they are hop-by-hop, or are listed in the connection header.
+
+ for key in _get_end2end_headers(response):
+ info[key] = response[key]
+ merged_response = Response(info)
+ if hasattr(response, "_stale_digest"):
+ merged_response._stale_digest = response._stale_digest
+ _updateCache(headers, merged_response, content, self.cache, cachekey)
+ response = merged_response
+ response.status = 200
+ response.fromcache = True
+
+ elif response.status == 200:
+ content = new_content
+ else:
+ self.cache.delete(cachekey)
+ content = new_content
+ else:
+ cc = _parse_cache_control(headers)
+ if "only-if-cached" in cc:
+ info["status"] = "504"
+ response = Response(info)
+ content = b""
+ else:
+ (response, content) = self._request(
+ conn, authority, uri, request_uri, method, body, headers, redirections, cachekey,
+ )
+ except Exception as e:
+ is_timeout = isinstance(e, socket.timeout)
+ if is_timeout:
+ conn = self.connections.pop(conn_key, None)
+ if conn:
+ conn.close()
+
+ if self.force_exception_to_status_code:
+ if isinstance(e, HttpLib2ErrorWithResponse):
+ response = e.response
+ content = e.content
+ response.status = 500
+ response.reason = str(e)
+ elif isinstance(e, socket.timeout):
+ content = b"Request Timeout"
+ response = Response({"content-type": "text/plain", "status": "408", "content-length": len(content),})
+ response.reason = "Request Timeout"
+ else:
+ content = str(e).encode("utf-8")
+ response = Response({"content-type": "text/plain", "status": "400", "content-length": len(content),})
+ response.reason = "Bad Request"
+ else:
+ raise
+
+ return (response, content)
+
+
+class Response(dict):
+ """An object more like email.message than httplib.HTTPResponse."""
+
+ """Is this response from our local cache"""
+ fromcache = False
+ """HTTP protocol version used by server.
+
+ 10 for HTTP/1.0, 11 for HTTP/1.1.
+ """
+ version = 11
+
+ "Status code returned by server. "
+ status = 200
+ """Reason phrase returned by server."""
+ reason = "Ok"
+
+ previous = None
+
+ def __init__(self, info):
+ # info is either an email.message or
+ # an httplib.HTTPResponse object.
+ if isinstance(info, http.client.HTTPResponse):
+ for key, value in info.getheaders():
+ key = key.lower()
+ prev = self.get(key)
+ if prev is not None:
+ value = ", ".join((prev, value))
+ self[key] = value
+ self.status = info.status
+ self["status"] = str(self.status)
+ self.reason = info.reason
+ self.version = info.version
+ elif isinstance(info, email.message.Message):
+ for key, value in list(info.items()):
+ self[key.lower()] = value
+ self.status = int(self["status"])
+ else:
+ for key, value in info.items():
+ self[key.lower()] = value
+ self.status = int(self.get("status", self.status))
+
+ def __getattr__(self, name):
+ if name == "dict":
+ return self
+ else:
+ raise AttributeError(name)
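+
+
+# For example (illustrative):
+#   r = Response({"status": "404", "content-type": "text/plain"})
+#   assert r.status == 404 and r["content-type"] == "text/plain"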
diff --git a/Lib/site-packages/httplib2/auth.py b/Lib/site-packages/httplib2/auth.py
new file mode 100644
index 0000000..b8028ae
--- /dev/null
+++ b/Lib/site-packages/httplib2/auth.py
@@ -0,0 +1,69 @@
+import base64
+import re
+
+import pyparsing as pp
+
+from .error import *
+
+
+try: # pyparsing>=3.0.0
+ downcaseTokens = pp.common.downcaseTokens
+except AttributeError:
+ downcaseTokens = pp.downcaseTokens
+
+UNQUOTE_PAIRS = re.compile(r"\\(.)")
+unquote = lambda s, l, t: UNQUOTE_PAIRS.sub(r"\1", t[0][1:-1])
+
+# https://tools.ietf.org/html/rfc7235#section-1.2
+# https://tools.ietf.org/html/rfc7235#appendix-B
+tchar = "!#$%&'*+-.^_`|~" + pp.nums + pp.alphas
+token = pp.Word(tchar).setName("token")
+token68 = pp.Combine(pp.Word("-._~+/" + pp.nums + pp.alphas) + pp.Optional(pp.Word("=").leaveWhitespace())).setName(
+ "token68"
+)
+
+quoted_string = pp.dblQuotedString.copy().setName("quoted-string").setParseAction(unquote)
+auth_param_name = token.copy().setName("auth-param-name").addParseAction(downcaseTokens)
+auth_param = auth_param_name + pp.Suppress("=") + (quoted_string | token)
+params = pp.Dict(pp.delimitedList(pp.Group(auth_param)))
+
+scheme = token("scheme")
+challenge = scheme + (params("params") | token68("token"))
+
+authentication_info = params.copy()
+www_authenticate = pp.delimitedList(pp.Group(challenge))
+
+
+def _parse_authentication_info(headers, headername="authentication-info"):
+ """https://tools.ietf.org/html/rfc7615
+ """
+ header = headers.get(headername, "").strip()
+ if not header:
+ return {}
+ try:
+ parsed = authentication_info.parseString(header)
+ except pp.ParseException as ex:
+ # print(ex.explain(ex))
+ raise MalformedHeader(headername)
+
+ return parsed.asDict()
+
+
+def _parse_www_authenticate(headers, headername="www-authenticate"):
+ """Returns a dictionary of dictionaries, one dict per auth_scheme."""
+ header = headers.get(headername, "").strip()
+ if not header:
+ return {}
+ try:
+ parsed = www_authenticate.parseString(header)
+ except pp.ParseException as ex:
+ # print(ex.explain(ex))
+ raise MalformedHeader(headername)
+
+ retval = {
+ challenge["scheme"].lower(): challenge["params"].asDict()
+ if "params" in challenge
+ else {"token": challenge.get("token")}
+ for challenge in parsed
+ }
+ return retval
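+
+
+# For example (illustrative):
+#   _parse_www_authenticate({"www-authenticate": 'Basic realm="protected"'})
+#   returns {"basic": {"realm": "protected"}}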
diff --git a/Lib/site-packages/httplib2/cacerts.txt b/Lib/site-packages/httplib2/cacerts.txt
new file mode 100644
index 0000000..78a444c
--- /dev/null
+++ b/Lib/site-packages/httplib2/cacerts.txt
@@ -0,0 +1,2225 @@
+# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
+# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
+# Label: "GTE CyberTrust Global Root"
+# Serial: 421
+# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db
+# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74
+# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36
+-----BEGIN CERTIFICATE-----
+MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD
+VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
+bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv
+b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV
+UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
+cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
+b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH
+iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS
+r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4
+04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r
+GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9
+3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P
+lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/
+-----END CERTIFICATE-----
+
+# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Label: "Thawte Server CA"
+# Serial: 1
+# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d
+# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c
+# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9
+-----BEGIN CERTIFICATE-----
+MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
+MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
+MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
+DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
+dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
+cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
+DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
+gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
+yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
+L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
+EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
+7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
+QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
+qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Label: "Thawte Premium Server CA"
+# Serial: 1
+# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a
+# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a
+# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72
+-----BEGIN CERTIFICATE-----
+MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
+dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
+MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
+MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
+A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
+b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
+cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
+bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
+VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
+ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
+uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
+9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
+hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
+pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Equifax OU=Equifax Secure Certificate Authority
+# Subject: O=Equifax OU=Equifax Secure Certificate Authority
+# Label: "Equifax Secure CA"
+# Serial: 903804111
+# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4
+# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a
+# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
+UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
+dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1
+MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx
+dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f
+BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A
+cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC
+AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ
+MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm
+aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw
+ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj
+IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF
+MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
+A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
+7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh
+1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
+-----END CERTIFICATE-----
+
+# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
+# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
+# Label: "Verisign Class 3 Public Primary Certification Authority - G2"
+# Serial: 167285380242319648451154478808036881606
+# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9
+# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f
+# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b
+-----BEGIN CERTIFICATE-----
+MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
+BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
+c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
+MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
+emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
+DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
+FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg
+UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
+YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
+MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
+AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4
+pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0
+13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID
+AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk
+U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i
+F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
+oJ2daZH9
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Label: "GlobalSign Root CA - R2"
+# Serial: 4835703278459682885658125
+# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
+# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
+# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
+# Label: "ValiCert Class 1 VA"
+# Serial: 1
+# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb
+# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e
+# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy
+NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y
+LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+
+TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y
+TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0
+LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW
+I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw
+nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
+# Label: "ValiCert Class 2 VA"
+# Serial: 1
+# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87
+# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6
+# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
+NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
+dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
+WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
+v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
+UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
+IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
+W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
+# Label: "RSA Root Certificate 1"
+# Serial: 1
+# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72
+# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb
+# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
+NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
+cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
+2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
+JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
+Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
+n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
+PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
+# Serial: 206684696279472310254277870180966723415
+# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
+# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
+# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
+N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
+KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
+kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
+CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
+Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
+imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
+2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
+DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
+/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 4 Public Primary Certification Authority - G3"
+# Serial: 314531972711909413743075096039378935511
+# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df
+# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d
+# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1
+GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ
++mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd
+U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm
+NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY
+ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/
+ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1
+CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq
+g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm
+fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
+2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/
+bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Secure Server CA"
+# Serial: 927650371
+# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee
+# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39
+# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50
+-----BEGIN CERTIFICATE-----
+MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
+VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
+ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
+KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
+ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
+MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
+ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
+b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
+bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
+U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
+A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
+I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
+wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
+AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
+oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
+BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
+dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
+MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
+b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
+dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
+MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
+E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
+MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
+hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
+95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
+2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946059622
+# MD5 Fingerprint: ba:21:ea:20:d6:dd:db:8f:c1:57:8b:40:ad:a1:fc:fc
+# SHA1 Fingerprint: 80:1d:62:d0:7b:44:9d:5c:5c:03:5c:98:ea:61:fa:44:3c:2a:58:fe
+# SHA256 Fingerprint: d1:c3:39:ea:27:84:eb:87:0f:93:4f:c5:63:4e:4a:a9:ad:55:05:01:64:01:f2:64:65:d3:7a:57:46:63:35:9f
+-----BEGIN CERTIFICATE-----
+MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0xOTEy
+MjQxODIwNTFaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo3QwcjARBglghkgBhvhCAQEEBAMCAAcwHwYDVR0jBBgwFoAUVeSB0RGA
+vtiJuQijMfmhJAkWuXAwHQYDVR0OBBYEFFXkgdERgL7YibkIozH5oSQJFrlwMB0G
+CSqGSIb2fQdBAAQQMA4bCFY1LjA6NC4wAwIEkDANBgkqhkiG9w0BAQUFAAOCAQEA
+WUesIYSKF8mciVMeuoCFGsY8Tj6xnLZ8xpJdGGQC49MGCBFhfGPjK50xA3B20qMo
+oPS7mmNz7W3lKtvtFKkrxjYR0CvrB4ul2p5cGZ1WEvVUKcgF7bISKo30Axv/55IQ
+h7A6tcOdBTcSo8f0FbnVpDkWm1M6I5HxqIKiaohowXkCIryqptau37AUX7iH0N18
+f3v/rxzP5tsHrV7bhZ3QKw0z2wTR5klAEyt2+z7pnIkPFc4YsIV4IU9rTw76NmfN
+B/L/CNDi3tm/Kq+4h4YhPATKt5Rof8886ZjXOP/swNlQ8C5LWK5Gb9Auw2DaclVy
+vUxFnmG6v4SBkgPR0ml8xQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
+# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
+# Label: "Equifax Secure Global eBusiness CA"
+# Serial: 1
+# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc
+# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45
+# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07
+-----BEGIN CERTIFICATE-----
+MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
+ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
+MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
+dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
+c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
+UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
+58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
+o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
+aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
+A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
+Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
+8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
+-----END CERTIFICATE-----
+
+# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
+# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
+# Label: "Equifax Secure eBusiness CA 1"
+# Serial: 4
+# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d
+# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41
+# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73
+-----BEGIN CERTIFICATE-----
+MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
+ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
+MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
+LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
+KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
+RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
+WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
+Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
+AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
+eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
+zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
+WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
+/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
+-----END CERTIFICATE-----
+
+# Issuer: O=Equifax Secure OU=Equifax Secure eBusiness CA-2
+# Subject: O=Equifax Secure OU=Equifax Secure eBusiness CA-2
+# Label: "Equifax Secure eBusiness CA 2"
+# Serial: 930140085
+# MD5 Fingerprint: aa:bf:bf:64:97:da:98:1d:6f:c6:08:3a:95:70:33:ca
+# SHA1 Fingerprint: 39:4f:f6:85:0b:06:be:52:e5:18:56:cc:10:e1:80:e8:82:b3:85:cc
+# SHA256 Fingerprint: 2f:27:4e:48:ab:a4:ac:7b:76:59:33:10:17:75:50:6d:c3:0e:e3:8e:f6:ac:d5:c0:49:32:cf:e0:41:23:42:20
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
+UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj
+dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0
+NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD
+VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G
+vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/
+BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C
+AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl
+IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw
+NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq
+y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF
+MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
+A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy
+0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1
+E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Low-Value Services Root"
+# Serial: 1
+# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc
+# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d
+# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7
+-----BEGIN CERTIFICATE-----
+MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw
+MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML
+QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD
+VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA
+A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul
+CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n
+tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl
+dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch
+PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC
++Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O
+BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl
+MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk
+ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB
+IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X
+7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz
+43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY
+eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl
+pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA
+WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Label: "AddTrust External Root"
+# Serial: 1
+# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
+# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
+# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
+-----BEGIN CERTIFICATE-----
+MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
+IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
+MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
+FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
+bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
+dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
+H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
+uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
+mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
+a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
+E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
+WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
+VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
+Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
+cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
+IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
+AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
+YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
+6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
+Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
+c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
+mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Public Services Root"
+# Serial: 1
+# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f
+# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5
+# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx
+MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB
+ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV
+BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV
+6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX
+GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP
+dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH
+1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF
+62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW
+BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw
+AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL
+MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU
+cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv
+b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6
+IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/
+iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao
+GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh
+4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm
+XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Qualified Certificates Root"
+# Serial: 1
+# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb
+# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf
+# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16
+-----BEGIN CERTIFICATE-----
+MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1
+MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK
+EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh
+BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq
+xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G
+87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i
+2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U
+WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1
+0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G
+A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr
+pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL
+ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm
+aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv
+hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm
+hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X
+dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3
+P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y
+iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no
+xqE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Label: "GeoTrust Global CA"
+# Serial: 144470
+# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
+# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
+# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
+-----BEGIN CERTIFICATE-----
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Global CA 2"
+# Serial: 1
+# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9
+# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d
+# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85
+-----BEGIN CERTIFICATE-----
+MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs
+IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg
+R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A
+PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8
+Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL
+TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL
+5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7
+S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe
+2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE
+FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap
+EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td
+EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv
+/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN
+A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0
+abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF
+I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz
+4iIprn2DQKi6bA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA"
+# Serial: 1
+# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
+# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
+# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
+-----BEGIN CERTIFICATE-----
+MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
+BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
+IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
+VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
+cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
+QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
+F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
+c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
+mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
+VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
+teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
+f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
+Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
+MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
+aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
+IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
+ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
+uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
+QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
+ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
+DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
+bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA 2"
+# Serial: 1
+# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
+# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
+# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
+VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
+c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
+WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
+FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
+XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
+se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
+KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
+IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
+y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
+hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
+QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
+Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
+HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
+KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
+L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
+Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
+ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
+T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
+GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
+1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
+OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
+6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
+QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
+-----END CERTIFICATE-----
+
+# Issuer: CN=America Online Root Certification Authority 1 O=America Online Inc.
+# Subject: CN=America Online Root Certification Authority 1 O=America Online Inc.
+# Label: "America Online Root Certification Authority 1"
+# Serial: 1
+# MD5 Fingerprint: 14:f1:08:ad:9d:fa:64:e2:89:e7:1c:cf:a8:ad:7d:5e
+# SHA1 Fingerprint: 39:21:c1:15:c1:5d:0e:ca:5c:cb:5b:c4:f0:7d:21:d8:05:0b:56:6a
+# SHA256 Fingerprint: 77:40:73:12:c6:3a:15:3d:5b:c0:0b:4e:51:75:9c:df:da:c2:37:dc:2a:33:b6:79:46:e9:8e:9b:fa:68:0a:e3
+-----BEGIN CERTIFICATE-----
+MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
+bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2
+MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft
+ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg
+Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lk
+hsmj76CGv2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym
+1BW32J/X3HGrfpq/m44zDyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsW
+OqMFf6Dch9Wc/HKpoH145LcxVR5lu9RhsCFg7RAycsWSJR74kEoYeEfffjA3PlAb
+2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP8c9GsEsPPt2IYriMqQko
+O3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAU
+AK3Zo/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
+BQUAA4IBAQB8itEfGDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkF
+Zu90821fnZmv9ov761KyBZiibyrFVL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAb
+LjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft3OJvx8Fi8eNy1gTIdGcL+oir
+oQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43gKd8hdIaC2y+C
+MMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds
+sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7
+-----END CERTIFICATE-----
+
+# Issuer: CN=America Online Root Certification Authority 2 O=America Online Inc.
+# Subject: CN=America Online Root Certification Authority 2 O=America Online Inc.
+# Label: "America Online Root Certification Authority 2"
+# Serial: 1
+# MD5 Fingerprint: d6:ed:3c:ca:e2:66:0f:af:10:43:0d:77:9b:04:09:bf
+# SHA1 Fingerprint: 85:b5:ff:67:9b:0c:79:96:1f:c8:6e:44:22:00:46:13:db:17:92:84
+# SHA256 Fingerprint: 7d:3b:46:5a:60:14:e5:26:c0:af:fc:ee:21:27:d2:31:17:27:ad:81:1c:26:84:2d:00:6a:f3:73:06:cc:80:bd
+-----BEGIN CERTIFICATE-----
+MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
+bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2
+MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft
+ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg
+Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIP
+ADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC
+206B89enfHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFci
+KtZHgVdEglZTvYYUAQv8f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2
+JxhP7JsowtS013wMPgwr38oE18aO6lhOqKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9
+BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JNRvCAOVIyD+OEsnpD8l7e
+Xz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0gBe4lL8B
+PeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67
+Xnfn6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEq
+Z8A9W6Wa6897GqidFEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZ
+o2C7HK2JNDJiuEMhBnIMoVxtRsX6Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3
++L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnjB453cMor9H124HhnAgMBAAGj
+YzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3OpaaEg5+31IqEj
+FNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE
+AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmn
+xPBUlgtk87FYT15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2
+LHo1YGwRgJfMqZJS5ivmae2p+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzccc
+obGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXgJXUjhx5c3LqdsKyzadsXg8n33gy8
+CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//ZoyzH1kUQ7rVyZ2OuMe
+IjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgOZtMA
+DjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2F
+AjgQ5ANh1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUX
+Om/9riW99XJZZLF0KjhfGEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPb
+AZO1XB4Y3WRayhgoPmMEEf0cjQAPuDffZ4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQl
+Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw
+RY8mkaKO/qk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Certificate Services O=Comodo CA Limited
+# Subject: CN=Secure Certificate Services O=Comodo CA Limited
+# Label: "Comodo Secure Services root"
+# Serial: 1
+# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd
+# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1
+# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8
+-----BEGIN CERTIFICATE-----
+MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp
+ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow
+fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV
+BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM
+cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S
+HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996
+CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk
+3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz
+6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV
+HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud
+EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv
+Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw
+Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww
+DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0
+5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj
+Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI
+gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ
+aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl
+izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited
+# Subject: CN=Trusted Certificate Services O=Comodo CA Limited
+# Label: "Comodo Trusted Services root"
+# Serial: 1
+# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27
+# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd
+# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0
+aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla
+MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO
+BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD
+VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW
+fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt
+TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL
+fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW
+1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7
+kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G
+A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v
+ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo
+dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu
+Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/
+HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32
+pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS
+jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+
+xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn
+dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi
+-----END CERTIFICATE-----
+
+# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
+# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
+# Label: "UTN DATACorp SGC Root CA"
+# Serial: 91374294542884689855167577680241077609
+# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06
+# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4
+# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48
+-----BEGIN CERTIFICATE-----
+MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB
+kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw
+IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG
+EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD
+VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu
+dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6
+E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ
+D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK
+4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq
+lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW
+bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB
+o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT
+MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js
+LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr
+BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB
+AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft
+Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj
+j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH
+KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv
+2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3
+mfnGV/TJVTl4uix5yaaIK/QI
+-----END CERTIFICATE-----
+
+# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
+# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
+# Label: "UTN USERFirst Hardware Root CA"
+# Serial: 91374294542884704022267039221184531197
+# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39
+# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7
+# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37
+-----BEGIN CERTIFICATE-----
+MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
+lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
+SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG
+A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe
+MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v
+d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh
+cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn
+0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ
+M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a
+MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd
+oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI
+DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy
+oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD
+VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0
+dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy
+bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF
+BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM
+//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli
+CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE
+CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
+3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS
+KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Label: "StartCom Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16
+# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f
+# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea
+-----BEGIN CERTIFICATE-----
+MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
+Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9
+MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
+U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
+cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
+pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
+OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
+Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
+Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
+HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
+Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
+Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
+26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
+AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE
+FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j
+ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js
+LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM
+BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0
+Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy
+dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh
+cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh
+YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg
+dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp
+bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ
+YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT
+TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ
+9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8
+jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW
+FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz
+ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1
+ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L
+EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu
+L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq
+yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC
+O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V
+um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
+NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Label: "GeoTrust Primary Certification Authority"
+# Serial: 32798226551256963324313806436981982369
+# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
+# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
+# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA"
+# Serial: 69529181992039203566298953787712940909
+# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
+# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
+# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
+# Serial: 33037644167568058970164719475676101450
+# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
+# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
+# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
+# Subject: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
+# Label: "TC TrustCenter Class 2 CA II"
+# Serial: 941389028203453866782103406992443
+# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23
+# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e
+# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4
+-----BEGIN CERTIFICATE-----
+MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
+BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0
+Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1
+OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i
+SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc
+VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf
+tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg
+uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J
+XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK
+8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99
+5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3
+kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy
+dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6
+Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz
+JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290
+Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
+TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS
+GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt
+ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8
+au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV
+hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI
+dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA
+# Subject: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA
+# Label: "TC TrustCenter Class 3 CA II"
+# Serial: 1506523511417715638772220530020799
+# MD5 Fingerprint: 56:5f:aa:80:61:12:17:f6:67:21:e6:2b:6d:61:56:8e
+# SHA1 Fingerprint: 80:25:ef:f4:6e:70:c8:d4:72:24:65:84:fe:40:3b:8a:8d:6a:db:f5
+# SHA256 Fingerprint: 8d:a0:84:fc:f9:9c:e0:77:22:f8:9b:32:05:93:98:06:fa:5c:b8:11:e1:c8:13:f6:a1:08:c7:d3:36:b3:40:8e
+-----BEGIN CERTIFICATE-----
+MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
+BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0
+Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1
+OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i
+SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc
+VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW
+Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q
+Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2
+1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq
+ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1
+Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX
+XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy
+dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6
+Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz
+JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290
+Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
+TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN
+irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8
+TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6
+g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB
+95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj
+S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Label: "TC TrustCenter Universal CA I"
+# Serial: 601024842042189035295619584734726
+# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c
+# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3
+# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
+BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1
+c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx
+MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg
+R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD
+VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR
+JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T
+fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu
+jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z
+wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ
+fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD
+VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G
+CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1
+7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn
+8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs
+ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT
+ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/
+2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY
+-----END CERTIFICATE-----
+
+# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Label: "Cybertrust Global Root"
+# Serial: 4835703278459682877484360
+# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
+# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
+# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
+-----BEGIN CERTIFICATE-----
+MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
+A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
+bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
+b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
+7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
+J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
+HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
+t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
+FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
+XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
+hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
+MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
+A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
+Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
+omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
+A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
+WL1WMRJOEcgh4LMRkWXbtKaIOM5V
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G3"
+# Serial: 28809105769928564313984085209975885599
+# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
+# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
+# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G2"
+# Serial: 71758320672825410020661621085256472406
+# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
+# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
+# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
+-----BEGIN CERTIFICATE-----
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G3"
+# Serial: 127614157056681299805556476275995414779
+# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
+# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
+# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G2"
+# Serial: 80682863203381065782177908751794619243
+# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
+# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
+# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Universal Root Certification Authority"
+# Serial: 85209574734084581917763752644031726877
+# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
+# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
+# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
+-----BEGIN CERTIFICATE-----
+MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
+vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
+ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
+IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
+IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
+bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
+9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
+H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
+LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
+/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
+rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
+WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
+exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
+DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
+sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
+lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
+7M2CYfE45k+XmCpajQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
+# Serial: 63143484348153506665311985501458640051
+# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
+# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
+# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
+-----BEGIN CERTIFICATE-----
+MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
+U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
+SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
+biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
+GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
+fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
+aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
+kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
+4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
+FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Subject: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Label: "TC TrustCenter Universal CA III"
+# Serial: 2010889993983507346460533407902964
+# MD5 Fingerprint: 9f:dd:db:ab:ff:8e:ff:45:21:5f:f0:6c:9d:8f:fe:2b
+# SHA1 Fingerprint: 96:56:cd:7b:57:96:98:95:d0:e1:41:46:68:06:fb:b8:c6:11:06:87
+# SHA256 Fingerprint: 30:9b:4a:87:f6:ca:56:c9:31:69:aa:a9:9c:6d:98:88:54:d7:89:2b:d5:43:7e:2d:07:b2:9c:be:da:55:d3:5d
+-----BEGIN CERTIFICATE-----
+MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
+BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1
+c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAeFw0wOTA5MDkwODE1MjdaFw0yOTEy
+MzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNUQyBUcnVzdENlbnRl
+ciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0ExKDAm
+BgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF
+5+cvAqBNLaT6hdqbJYUtQCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYv
+DIRlzg9uwliT6CwLOunBjvvya8o84pxOjuT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8v
+zArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+EutCHnNaYlAJ/Uqwa1D7KRT
+yGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1M4BDj5yj
+dipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBh
+MB8GA1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI
+4jANBgkqhkiG9w0BAQUFAAOCAQEAg8ev6n9NCjw5sWi+e22JLumzCecYV42Fmhfz
+dkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+KGwWaODIl0YgoGhnYIg5IFHY
+aAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhKBgePxLcHsU0G
+DeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV
+CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPH
+LQNjO9Po5KIqwoIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Label: "StartCom Certification Authority"
+# Serial: 45
+# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16
+# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0
+# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11
+-----BEGIN CERTIFICATE-----
+MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
+Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9
+MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
+U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
+cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
+pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
+OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
+Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
+Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
+HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
+Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
+Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
+26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
+AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul
+F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC
+ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w
+ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk
+aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0
+YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg
+c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93
+d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG
+CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1
+dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF
+wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS
+Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst
+0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc
+pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl
+CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF
+P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK
+1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm
+KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE
+JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ
+8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm
+fyWl8kgAwKQB2j8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd.
+# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd.
+# Label: "StartCom Certification Authority G2"
+# Serial: 59
+# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64
+# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17
+# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95
+-----BEGIN CERTIFICATE-----
+MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1
+OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG
+A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ
+JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD
+vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo
+D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/
+Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW
+RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK
+HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN
+nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM
+0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i
+UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9
+Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg
+TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
+AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL
+BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K
+2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX
+UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl
+6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK
+9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ
+HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI
+wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY
+XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l
+IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo
+hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr
+so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2, OU=www.digicert.com, O=DigiCert Inc, C=US
+# Subject: CN=DigiCert Global Root G2, OU=www.digicert.com, O=DigiCert Inc, C=US
+# Serial: 33af1e6a711a9a0bb2864b11d09fae5
+# MD5 Fingerprint: E4:A6:8A:C8:54:AC:52:42:46:0A:FD:72:48:1B:2A:44
+# SHA1 Fingerprint: DF:3C:24:F9:BF:D6:66:76:1B:26:80:73:FE:06:D1:CC:8D:4F:82:A4
+# SHA256 Fingerprint: CB:3C:CB:B7:60:31:E5:E0:13:8F:8D:D3:9A:23:F9:DE:47:FF:C3:5E:43:C1:14:4C:EA:27:D4:6A:5A:B1:CB:5F
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: /C=US/O=Internet Security Research Group/CN=ISRG Root X1
+# Subject: /C=US/O=Internet Security Research Group/CN=ISRG Root X1
+# Serial: 8210CFB0D240E3594463E0BB63828B00
+# SHA1 Fingerprint: CA:BD:2A:79:A1:07:6A:31:F2:1D:25:36:35:CB:03:9D:43:29:A5:E8
+# SHA256 Fingerprint: 96:BC:EC:06:26:49:76:F3:74:60:77:9A:CF:28:C5:A7:CF:E8:A3:C0:AA:E1:1A:8F:FC:EE:05:C0:BD:DF:08:C6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: /C=US/O=Internet Security Research Group/CN=ISRG Root X2
+# Subject: /C=US/O=Internet Security Research Group/CN=ISRG Root X2
+# Serial: 41D29DD172EAEEA780C12C6CE92F8752
+# SHA1 Fingerprint: BD:B1:B9:3C:D5:97:8D:45:C6:26:14:55:F8:DB:95:C7:5A:D1:53:AF
+# SHA256 Fingerprint: 69:72:9B:8E:15:A8:6E:FC:17:7A:57:AF:B7:17:1D:FC:64:AD:D2:8C:2F:CA:8C:F1:50:7E:34:45:3C:CB:14:70
+-----BEGIN CERTIFICATE-----
+MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw
+CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg
+R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00
+MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT
+ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw
+EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW
++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9
+ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI
+zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW
+tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1
+/q4AaOeMSQ+2b1tbFfLn
+-----END CERTIFICATE-----
diff --git a/Lib/site-packages/httplib2/certs.py b/Lib/site-packages/httplib2/certs.py
new file mode 100644
index 0000000..59d1ffc
--- /dev/null
+++ b/Lib/site-packages/httplib2/certs.py
@@ -0,0 +1,42 @@
+"""Utilities for certificate management."""
+
+import os
+
+certifi_available = False
+certifi_where = None
+try:
+ from certifi import where as certifi_where
+ certifi_available = True
+except ImportError:
+ pass
+
+custom_ca_locater_available = False
+custom_ca_locater_where = None
+try:
+ from ca_certs_locater import get as custom_ca_locater_where
+ custom_ca_locater_available = True
+except ImportError:
+ pass
+
+
+BUILTIN_CA_CERTS = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), "cacerts.txt"
+)
+
+
+def where():
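+    # Resolution order: an explicit HTTPLIB2_CA_CERTS environment override,
+    # then a site-provided ca_certs_locater hook, then certifi if installed,
+    # and finally the cacerts.txt bundle shipped alongside this module.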
+ env = os.environ.get("HTTPLIB2_CA_CERTS")
+ if env is not None:
+ if os.path.isfile(env):
+ return env
+ else:
+            raise RuntimeError("Environment variable HTTPLIB2_CA_CERTS is not a valid file")
+ if custom_ca_locater_available:
+ return custom_ca_locater_where()
+ if certifi_available:
+ return certifi_where()
+ return BUILTIN_CA_CERTS
+
+
+if __name__ == "__main__":
+ print(where())
diff --git a/Lib/site-packages/httplib2/error.py b/Lib/site-packages/httplib2/error.py
new file mode 100644
index 0000000..0e68c12
--- /dev/null
+++ b/Lib/site-packages/httplib2/error.py
@@ -0,0 +1,48 @@
+# All exceptions raised here derive from HttpLib2Error
+class HttpLib2Error(Exception):
+ pass
+
+
+# Some exceptions can be caught and optionally
+# be turned back into responses.
+class HttpLib2ErrorWithResponse(HttpLib2Error):
+ def __init__(self, desc, response, content):
+ self.response = response
+ self.content = content
+ HttpLib2Error.__init__(self, desc)
+
+
+class RedirectMissingLocation(HttpLib2ErrorWithResponse):
+ pass
+
+
+class RedirectLimit(HttpLib2ErrorWithResponse):
+ pass
+
+
+class FailedToDecompressContent(HttpLib2ErrorWithResponse):
+ pass
+
+
+class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse):
+ pass
+
+
+class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse):
+ pass
+
+
+class MalformedHeader(HttpLib2Error):
+ pass
+
+
+class RelativeURIError(HttpLib2Error):
+ pass
+
+
+class ServerNotFoundError(HttpLib2Error):
+ pass
+
+
+class ProxiesUnavailableError(HttpLib2Error):
+ pass
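+
+
+# Example (a sketch, not part of the original module): callers typically
+# catch the shared base class rather than each subclass:
+#
+#     import httplib2
+#     try:
+#         httplib2.Http().request("http://no-such-host.invalid/")
+#     except httplib2.HttpLib2Error as exc:
+#         print("request failed:", exc)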
diff --git a/Lib/site-packages/httplib2/iri2uri.py b/Lib/site-packages/httplib2/iri2uri.py
new file mode 100644
index 0000000..86e361e
--- /dev/null
+++ b/Lib/site-packages/httplib2/iri2uri.py
@@ -0,0 +1,124 @@
+# -*- coding: utf-8 -*-
+"""Converts an IRI to a URI."""
+
+__author__ = "Joe Gregorio (joe@bitworking.org)"
+__copyright__ = "Copyright 2006, Joe Gregorio"
+__contributors__ = []
+__version__ = "1.0.0"
+__license__ = "MIT"
+
+import urllib.parse
+
+# Convert an IRI to a URI following the rules in RFC 3987
+#
+# The characters we need to encode and escape are defined in the spec:
+#
+# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
+# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
+# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
+# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
+# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
+# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
+# / %xD0000-DFFFD / %xE1000-EFFFD
+
+escape_range = [
+ (0xA0, 0xD7FF),
+ (0xE000, 0xF8FF),
+ (0xF900, 0xFDCF),
+ (0xFDF0, 0xFFEF),
+ (0x10000, 0x1FFFD),
+ (0x20000, 0x2FFFD),
+ (0x30000, 0x3FFFD),
+ (0x40000, 0x4FFFD),
+ (0x50000, 0x5FFFD),
+ (0x60000, 0x6FFFD),
+ (0x70000, 0x7FFFD),
+ (0x80000, 0x8FFFD),
+ (0x90000, 0x9FFFD),
+ (0xA0000, 0xAFFFD),
+ (0xB0000, 0xBFFFD),
+ (0xC0000, 0xCFFFD),
+ (0xD0000, 0xDFFFD),
+ (0xE1000, 0xEFFFD),
+ (0xF0000, 0xFFFFD),
+ (0x100000, 0x10FFFD),
+]
+
+
+def encode(c):
+ retval = c
+ i = ord(c)
+ for low, high in escape_range:
+ if i < low:
+ break
+ if i >= low and i <= high:
+ retval = "".join(["%%%2X" % o for o in c.encode("utf-8")])
+ break
+ return retval
+
+
+def iri2uri(uri):
+ """Convert an IRI to a URI. Note that IRIs must be
+ passed in a unicode strings. That is, do not utf-8 encode
+ the IRI before passing it into the function."""
+ if isinstance(uri, str):
+ (scheme, authority, path, query, fragment) = urllib.parse.urlsplit(uri)
+ authority = authority.encode("idna").decode("utf-8")
+ # For each character in 'ucschar' or 'iprivate'
+ # 1. encode as utf-8
+ # 2. then %-encode each octet of that utf-8
+ uri = urllib.parse.urlunsplit((scheme, authority, path, query, fragment))
+ uri = "".join([encode(c) for c in uri])
+ return uri
+
+
+if __name__ == "__main__":
+ import unittest
+
+ class Test(unittest.TestCase):
+ def test_uris(self):
+ """Test that URIs are invariant under the transformation."""
+ invariant = [
+ "ftp://ftp.is.co.za/rfc/rfc1808.txt",
+ "http://www.ietf.org/rfc/rfc2396.txt",
+ "ldap://[2001:db8::7]/c=GB?objectClass?one",
+ "mailto:John.Doe@example.com",
+ "news:comp.infosystems.www.servers.unix",
+ "tel:+1-816-555-1212",
+ "telnet://192.0.2.16:80/",
+ "urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
+ ]
+ for uri in invariant:
+ self.assertEqual(uri, iri2uri(uri))
+
+ def test_iri(self):
+ """Test that the right type of escaping is done for each part of the URI."""
+ self.assertEqual(
+ "http://xn--o3h.com/%E2%98%84",
+ iri2uri("http://\N{COMET}.com/\N{COMET}"),
+ )
+ self.assertEqual(
+ "http://bitworking.org/?fred=%E2%98%84",
+ iri2uri("http://bitworking.org/?fred=\N{COMET}"),
+ )
+ self.assertEqual(
+ "http://bitworking.org/#%E2%98%84",
+ iri2uri("http://bitworking.org/#\N{COMET}"),
+ )
+ self.assertEqual("#%E2%98%84", iri2uri("#\N{COMET}"))
+ self.assertEqual(
+ "/fred?bar=%E2%98%9A#%E2%98%84",
+ iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"),
+ )
+ self.assertEqual(
+ "/fred?bar=%E2%98%9A#%E2%98%84",
+ iri2uri(iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")),
+ )
+ self.assertNotEqual(
+ "/fred?bar=%E2%98%9A#%E2%98%84",
+ iri2uri(
+ "/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode("utf-8")
+ ),
+ )
+
+ unittest.main()
diff --git a/Lib/site-packages/httplib2/socks.py b/Lib/site-packages/httplib2/socks.py
new file mode 100644
index 0000000..cc68e63
--- /dev/null
+++ b/Lib/site-packages/httplib2/socks.py
@@ -0,0 +1,518 @@
+"""SocksiPy - Python SOCKS module.
+
+Version 1.00
+
+Copyright 2006 Dan-Haim. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+3. Neither the name of Dan Haim nor the names of his contributors may be used
+ to endorse or promote products derived from this software without specific
+ prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
+OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+This module provides a standard socket-like interface for Python
+for tunneling connections through SOCKS proxies.
+
+Minor modifications made by Christopher Gilbert (http://motomastyle.com/) for
+use in PyLoris (http://pyloris.sourceforge.net/).
+
+Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
+mainly to merge bug fixes found in Sourceforge.
+"""
+
+import base64
+import socket
+import struct
+import sys
+
+if getattr(socket, "socket", None) is None:
+ raise ImportError("socket.socket missing, proxy support unusable")
+
+PROXY_TYPE_SOCKS4 = 1
+PROXY_TYPE_SOCKS5 = 2
+PROXY_TYPE_HTTP = 3
+PROXY_TYPE_HTTP_NO_TUNNEL = 4
+
+_defaultproxy = None
+_orgsocket = socket.socket
+
+
+class ProxyError(Exception):
+ pass
+
+
+class GeneralProxyError(ProxyError):
+ pass
+
+
+class Socks5AuthError(ProxyError):
+ pass
+
+
+class Socks5Error(ProxyError):
+ pass
+
+
+class Socks4Error(ProxyError):
+ pass
+
+
+class HTTPError(ProxyError):
+ pass
+
+
+_generalerrors = (
+ "success",
+ "invalid data",
+ "not connected",
+ "not available",
+ "bad proxy type",
+ "bad input",
+)
+
+_socks5errors = (
+ "succeeded",
+ "general SOCKS server failure",
+ "connection not allowed by ruleset",
+ "Network unreachable",
+ "Host unreachable",
+ "Connection refused",
+ "TTL expired",
+ "Command not supported",
+ "Address type not supported",
+ "Unknown error",
+)
+
+_socks5autherrors = (
+ "succeeded",
+ "authentication is required",
+ "all offered authentication methods were rejected",
+ "unknown username or invalid password",
+ "unknown error",
+)
+
+_socks4errors = (
+ "request granted",
+ "request rejected or failed",
+ "request rejected because SOCKS server cannot connect to identd on the client",
+ "request rejected because the client program and identd report different "
+ "user-ids",
+ "unknown error",
+)
+
+
+def setdefaultproxy(
+ proxytype=None, addr=None, port=None, rdns=True, username=None, password=None
+):
+ """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
+ Sets a default proxy which all further socksocket objects will use,
+ unless explicitly changed.
+ """
+ global _defaultproxy
+ _defaultproxy = (proxytype, addr, port, rdns, username, password)
+
+
+def wrapmodule(module):
+ """wrapmodule(module)
+
+ Attempts to replace a module's socket library with a SOCKS socket. Must set
+ a default proxy using setdefaultproxy(...) first.
+ This will only work on modules that import socket directly into the
+ namespace;
+ most of the Python Standard Library falls into this category.
+ """
+    if _defaultproxy is not None:
+ module.socket.socket = socksocket
+ else:
+ raise GeneralProxyError((4, "no proxy specified"))
+
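+# Example (a sketch): assumes a SOCKS5 proxy listening on localhost:1080;
+# adjust the host and port for your environment.
+#
+#     import ftplib
+#     setdefaultproxy(PROXY_TYPE_SOCKS5, "localhost", 1080)
+#     wrapmodule(ftplib)
+#     # ftplib connections are now tunneled through the proxy.
+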
+
+class socksocket(socket.socket):
+ """socksocket([family[, type[, proto]]]) -> socket object
+ Open a SOCKS enabled socket. The parameters are the same as
+ those of the standard socket init. In order for SOCKS to work,
+ you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
+ """
+
+ def __init__(
+ self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None
+ ):
+ _orgsocket.__init__(self, family, type, proto, _sock)
+        if _defaultproxy is not None:
+ self.__proxy = _defaultproxy
+ else:
+ self.__proxy = (None, None, None, None, None, None)
+ self.__proxysockname = None
+ self.__proxypeername = None
+ self.__httptunnel = True
+
+ def __recvall(self, count):
+ """__recvall(count) -> data
+ Receive EXACTLY the number of bytes requested from the socket.
+ Blocks until the required number of bytes have been received.
+ """
+ data = self.recv(count)
+ while len(data) < count:
+ d = self.recv(count - len(data))
+ if not d:
+ raise GeneralProxyError((0, "connection closed unexpectedly"))
+ data = data + d
+ return data
+
+ def sendall(self, content, *args):
+ """ override socket.socket.sendall method to rewrite the header
+ for non-tunneling proxies if needed
+ """
+ if not self.__httptunnel:
+ content = self.__rewriteproxy(content)
+ return super(socksocket, self).sendall(content, *args)
+
+ def __rewriteproxy(self, header):
+ """ rewrite HTTP request headers to support non-tunneling proxies
+ (i.e. those which do not support the CONNECT method).
+ This only works for HTTP (not HTTPS) since HTTPS requires tunneling.
+ """
+ host, endpt = None, None
+ hdrs = header.split("\r\n")
+ for hdr in hdrs:
+ if hdr.lower().startswith("host:"):
+ host = hdr
+ elif hdr.lower().startswith("get") or hdr.lower().startswith("post"):
+ endpt = hdr
+ if host and endpt:
+ hdrs.remove(host)
+ hdrs.remove(endpt)
+ host = host.split(" ")[1]
+ endpt = endpt.split(" ")
+            if self.__proxy[4] is not None and self.__proxy[5] is not None:
+ hdrs.insert(0, self.__getauthheader())
+ hdrs.insert(0, "Host: %s" % host)
+ hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2]))
+ return "\r\n".join(hdrs)
+
+ def __getauthheader(self):
+ auth = self.__proxy[4] + b":" + self.__proxy[5]
+ return "Proxy-Authorization: Basic " + base64.b64encode(auth).decode()
+
+ def setproxy(
+ self,
+ proxytype=None,
+ addr=None,
+ port=None,
+ rdns=True,
+ username=None,
+ password=None,
+ headers=None,
+ ):
+ """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
+
+ Sets the proxy to be used.
+ proxytype - The type of the proxy to be used. Three types
+ are supported: PROXY_TYPE_SOCKS4 (including socks4a),
+ PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
+ addr - The address of the server (IP or DNS).
+ port - The port of the server. Defaults to 1080 for SOCKS
+ servers and 8080 for HTTP proxy servers.
+        rdns - Should DNS queries be performed on the remote side
+ (rather than the local side). The default is True.
+ Note: This has no effect with SOCKS4 servers.
+ username - Username to authenticate with to the server.
+ The default is no authentication.
+ password - Password to authenticate with to the server.
+ Only relevant when username is also provided.
+ headers - Additional or modified headers for the proxy connect
+ request.
+ """
+ self.__proxy = (
+ proxytype,
+ addr,
+ port,
+ rdns,
+ username.encode() if username else None,
+ password.encode() if password else None,
+ headers,
+ )
+
+ def __negotiatesocks5(self, destaddr, destport):
+ """__negotiatesocks5(self,destaddr,destport)
+ Negotiates a connection through a SOCKS5 server.
+ """
+ # First we'll send the authentication packages we support.
+        if (self.__proxy[4] is not None) and (self.__proxy[5] is not None):
+ # The username/password details were supplied to the
+ # setproxy method so we support the USERNAME/PASSWORD
+ # authentication (in addition to the standard none).
+ self.sendall(struct.pack("BBBB", 0x05, 0x02, 0x00, 0x02))
+ else:
+ # No username/password were entered, therefore we
+ # only support connections with no authentication.
+ self.sendall(struct.pack("BBB", 0x05, 0x01, 0x00))
+ # We'll receive the server's response to determine which
+ # method was selected
+ chosenauth = self.__recvall(2)
+ if chosenauth[0:1] != chr(0x05).encode():
+ self.close()
+ raise GeneralProxyError((1, _generalerrors[1]))
+ # Check the chosen authentication method
+ if chosenauth[1:2] == chr(0x00).encode():
+ # No authentication is required
+ pass
+ elif chosenauth[1:2] == chr(0x02).encode():
+ # Okay, we need to perform a basic username/password
+ # authentication.
+ packet = bytearray()
+ packet.append(0x01)
+ packet.append(len(self.__proxy[4]))
+ packet.extend(self.__proxy[4])
+ packet.append(len(self.__proxy[5]))
+ packet.extend(self.__proxy[5])
+ self.sendall(packet)
+ authstat = self.__recvall(2)
+ if authstat[0:1] != chr(0x01).encode():
+ # Bad response
+ self.close()
+ raise GeneralProxyError((1, _generalerrors[1]))
+ if authstat[1:2] != chr(0x00).encode():
+ # Authentication failed
+ self.close()
+ raise Socks5AuthError((3, _socks5autherrors[3]))
+ # Authentication succeeded
+ else:
+ # Reaching here is always bad
+ self.close()
+            # Slice (not index) so we compare bytes to bytes on Python 3
+            if chosenauth[1:2] == chr(0xFF).encode():
+ raise Socks5AuthError((2, _socks5autherrors[2]))
+ else:
+ raise GeneralProxyError((1, _generalerrors[1]))
+ # Now we can request the actual connection
+ req = struct.pack("BBB", 0x05, 0x01, 0x00)
+ # If the given destination address is an IP address, we'll
+ # use the IPv4 address request even if remote resolving was specified.
+ try:
+ ipaddr = socket.inet_aton(destaddr)
+ req = req + chr(0x01).encode() + ipaddr
+ except socket.error:
+ # Well it's not an IP number, so it's probably a DNS name.
+ if self.__proxy[3]:
+ # Resolve remotely
+ ipaddr = None
+ req = (
+ req
+ + chr(0x03).encode()
+ + chr(len(destaddr)).encode()
+ + destaddr.encode()
+ )
+ else:
+ # Resolve locally
+ ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
+ req = req + chr(0x01).encode() + ipaddr
+ req = req + struct.pack(">H", destport)
+ self.sendall(req)
+ # Get the response
+ resp = self.__recvall(4)
+ if resp[0:1] != chr(0x05).encode():
+ self.close()
+ raise GeneralProxyError((1, _generalerrors[1]))
+ elif resp[1:2] != chr(0x00).encode():
+ # Connection failed
+ self.close()
+ if ord(resp[1:2]) <= 8:
+ raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])]))
+ else:
+ raise Socks5Error((9, _socks5errors[9]))
+ # Get the bound address/port
+ elif resp[3:4] == chr(0x01).encode():
+ boundaddr = self.__recvall(4)
+ elif resp[3:4] == chr(0x03).encode():
+ resp = resp + self.recv(1)
+ boundaddr = self.__recvall(ord(resp[4:5]))
+ else:
+ self.close()
+ raise GeneralProxyError((1, _generalerrors[1]))
+ boundport = struct.unpack(">H", self.__recvall(2))[0]
+ self.__proxysockname = (boundaddr, boundport)
+        if ipaddr is not None:
+ self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
+ else:
+ self.__proxypeername = (destaddr, destport)
+
+ def getproxysockname(self):
+ """getsockname() -> address info
+ Returns the bound IP address and port number at the proxy.
+ """
+ return self.__proxysockname
+
+ def getproxypeername(self):
+ """getproxypeername() -> address info
+ Returns the IP and port number of the proxy.
+ """
+ return _orgsocket.getpeername(self)
+
+ def getpeername(self):
+ """getpeername() -> address info
+ Returns the IP address and port number of the destination
+ machine (note: getproxypeername returns the proxy)
+ """
+ return self.__proxypeername
+
+ def __negotiatesocks4(self, destaddr, destport):
+ """__negotiatesocks4(self,destaddr,destport)
+ Negotiates a connection through a SOCKS4 server.
+ """
+ # Check if the destination address provided is an IP address
+ rmtrslv = False
+ try:
+ ipaddr = socket.inet_aton(destaddr)
+ except socket.error:
+ # It's a DNS name. Check where it should be resolved.
+ if self.__proxy[3]:
+ ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01)
+ rmtrslv = True
+ else:
+ ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
+ # Construct the request packet
+ req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr
+ # The username parameter is considered userid for SOCKS4
+        if self.__proxy[4] is not None:
+ req = req + self.__proxy[4]
+ req = req + chr(0x00).encode()
+ # DNS name if remote resolving is required
+ # NOTE: This is actually an extension to the SOCKS4 protocol
+ # called SOCKS4A and may not be supported in all cases.
+ if rmtrslv:
+            req = req + destaddr.encode() + chr(0x00).encode()
+ self.sendall(req)
+ # Get the response from the server
+ resp = self.__recvall(8)
+ if resp[0:1] != chr(0x00).encode():
+ # Bad data
+ self.close()
+ raise GeneralProxyError((1, _generalerrors[1]))
+ if resp[1:2] != chr(0x5A).encode():
+ # Server returned an error
+ self.close()
+            if ord(resp[1:2]) in (91, 92, 93):
+                raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90]))
+ else:
+ raise Socks4Error((94, _socks4errors[4]))
+ # Get the bound address/port
+ self.__proxysockname = (
+ socket.inet_ntoa(resp[4:]),
+ struct.unpack(">H", resp[2:4])[0],
+ )
+        if not rmtrslv:  # resolved locally, so report the real peer address
+ self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
+ else:
+ self.__proxypeername = (destaddr, destport)
+
+ def __negotiatehttp(self, destaddr, destport):
+ """__negotiatehttp(self,destaddr,destport)
+ Negotiates a connection through an HTTP server.
+ """
+ # If we need to resolve locally, we do this now
+ if not self.__proxy[3]:
+ addr = socket.gethostbyname(destaddr)
+ else:
+ addr = destaddr
+ headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
+ wrote_host_header = False
+ wrote_auth_header = False
+        if self.__proxy[6] is not None:
+            for key, val in self.__proxy[6].items():
+                headers += [key, ": ", val, "\r\n"]
+                wrote_host_header = wrote_host_header or key.lower() == "host"
+                wrote_auth_header = wrote_auth_header or key.lower() == "proxy-authorization"
+ if not wrote_host_header:
+ headers += ["Host: ", destaddr, "\r\n"]
+ if not wrote_auth_header:
+            if self.__proxy[4] is not None and self.__proxy[5] is not None:
+ headers += [self.__getauthheader(), "\r\n"]
+ headers.append("\r\n")
+ self.sendall("".join(headers).encode())
+ # We read the response until we get the string "\r\n\r\n"
+ resp = self.recv(1)
+ while resp.find("\r\n\r\n".encode()) == -1:
+ resp = resp + self.recv(1)
+ # We just need the first line to check if the connection
+ # was successful
+ statusline = resp.splitlines()[0].split(" ".encode(), 2)
+ if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()):
+ self.close()
+ raise GeneralProxyError((1, _generalerrors[1]))
+ try:
+ statuscode = int(statusline[1])
+ except ValueError:
+ self.close()
+ raise GeneralProxyError((1, _generalerrors[1]))
+ if statuscode != 200:
+ self.close()
+ raise HTTPError((statuscode, statusline[2]))
+ self.__proxysockname = ("0.0.0.0", 0)
+ self.__proxypeername = (addr, destport)
+
+ def connect(self, destpair):
+ """connect(self, despair)
+ Connects to the specified destination through a proxy.
+ destpar - A tuple of the IP/DNS address and the port number.
+ (identical to socket's connect).
+ To select the proxy server use setproxy().
+ """
+ # Do a minimal input check first
+        if (
+            not isinstance(destpair, (list, tuple))
+            or (len(destpair) < 2)
+            or (not isinstance(destpair[0], (str, bytes)))
+            or (not isinstance(destpair[1], int))
+        ):
+            raise GeneralProxyError((5, _generalerrors[5]))
+        if self.__proxy[0] == PROXY_TYPE_SOCKS5:
+            portnum = self.__proxy[2] if self.__proxy[2] is not None else 1080
+            _orgsocket.connect(self, (self.__proxy[1], portnum))
+            self.__negotiatesocks5(destpair[0], destpair[1])
+        elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
+            portnum = self.__proxy[2] if self.__proxy[2] is not None else 1080
+            _orgsocket.connect(self, (self.__proxy[1], portnum))
+            self.__negotiatesocks4(destpair[0], destpair[1])
+        elif self.__proxy[0] == PROXY_TYPE_HTTP:
+            portnum = self.__proxy[2] if self.__proxy[2] is not None else 8080
+            _orgsocket.connect(self, (self.__proxy[1], portnum))
+            self.__negotiatehttp(destpair[0], destpair[1])
+        elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL:
+            portnum = self.__proxy[2] if self.__proxy[2] is not None else 8080
+            _orgsocket.connect(self, (self.__proxy[1], portnum))
+            if destpair[1] == 443:
+                self.__negotiatehttp(destpair[0], destpair[1])
+            else:
+                self.__httptunnel = False
+        elif self.__proxy[0] is None:
+            _orgsocket.connect(self, (destpair[0], destpair[1]))
+        else:
+            raise GeneralProxyError((4, _generalerrors[4]))
diff --git a/Lib/site-packages/idna-3.6.dist-info/INSTALLER b/Lib/site-packages/idna-3.6.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/idna-3.6.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/idna-3.6.dist-info/LICENSE.md b/Lib/site-packages/idna-3.6.dist-info/LICENSE.md
new file mode 100644
index 0000000..ce36701
--- /dev/null
+++ b/Lib/site-packages/idna-3.6.dist-info/LICENSE.md
@@ -0,0 +1,31 @@
+BSD 3-Clause License
+
+Copyright (c) 2013-2023, Kim Davies and contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/Lib/site-packages/idna-3.6.dist-info/METADATA b/Lib/site-packages/idna-3.6.dist-info/METADATA
new file mode 100644
index 0000000..9be8060
--- /dev/null
+++ b/Lib/site-packages/idna-3.6.dist-info/METADATA
@@ -0,0 +1,243 @@
+Metadata-Version: 2.1
+Name: idna
+Version: 3.6
+Summary: Internationalized Domain Names in Applications (IDNA)
+Author-email: Kim Davies
+Requires-Python: >=3.5
+Description-Content-Type: text/x-rst
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: Name Service (DNS)
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Utilities
+Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst
+Project-URL: Issue tracker, https://github.com/kjd/idna/issues
+Project-URL: Source, https://github.com/kjd/idna
+
+Internationalized Domain Names in Applications (IDNA)
+=====================================================
+
+Support for the Internationalized Domain Names in
+Applications (IDNA) protocol as specified in `RFC 5891
+<https://tools.ietf.org/html/rfc5891>`_. This is the latest version of
+the protocol and is sometimes referred to as “IDNA 2008”.
+
+This library also provides support for Unicode Technical
+Standard 46, `Unicode IDNA Compatibility Processing
+<https://unicode.org/reports/tr46/>`_.
+
+This acts as a suitable replacement for the “encodings.idna”
+module that comes with the Python standard library, but which
+only supports the older superseded IDNA specification (`RFC 3490
+<https://tools.ietf.org/html/rfc3490>`_).
+
+Basic functions are simply executed:
+
+.. code-block:: pycon
+
+ >>> import idna
+ >>> idna.encode('ドメイン.テスト')
+ b'xn--eckwd4c7c.xn--zckzah'
+ >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+ ドメイン.テスト
+
+
+Installation
+------------
+
+This package is available for installation from PyPI:
+
+.. code-block:: bash
+
+ $ python3 -m pip install idna
+
+
+Usage
+-----
+
+For typical usage, the ``encode`` and ``decode`` functions will take a
+domain name argument and perform a conversion to A-labels or U-labels
+respectively.
+
+.. code-block:: pycon
+
+ >>> import idna
+ >>> idna.encode('ドメイン.テスト')
+ b'xn--eckwd4c7c.xn--zckzah'
+ >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+ ドメイン.テスト
+
+You may use the codec encoding and decoding methods using the
+``idna.codec`` module:
+
+.. code-block:: pycon
+
+ >>> import idna.codec
+ >>> print('домен.испытание'.encode('idna2008'))
+ b'xn--d1acufc.xn--80akhbyknj4f'
+ >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna2008'))
+ домен.испытание
+
+Conversions can be applied on a per-label basis using the ``ulabel`` or
+``alabel`` functions if necessary:
+
+.. code-block:: pycon
+
+ >>> idna.alabel('测试')
+ b'xn--0zwm56d'
+
+Compatibility Mapping (UTS #46)
++++++++++++++++++++++++++++++++
+
+As described in `RFC 5895 <https://tools.ietf.org/html/rfc5895>`_, the
+IDNA specification does not normalize input from different potential
+ways a user may input a domain name. This functionality, known as
+a “mapping”, is considered by the specification to be a local
+user-interface issue distinct from IDNA conversion functionality.
+
+This library provides one such mapping that was developed by the
+Unicode Consortium. Known as `Unicode IDNA Compatibility Processing
+<https://unicode.org/reports/tr46/>`_, it provides for both a regular
+mapping for typical applications, as well as a transitional mapping to
+help migrate from older IDNA 2003 applications.
+
+For example, “Königsgäßchen” is not a permissible label as *LATIN
+CAPITAL LETTER K* is not allowed (nor are capital letters in general).
+UTS 46 will convert this into lower case prior to applying the IDNA
+conversion.
+
+.. code-block:: pycon
+
+ >>> import idna
+ >>> idna.encode('Königsgäßchen')
+ ...
+ idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed
+ >>> idna.encode('Königsgäßchen', uts46=True)
+ b'xn--knigsgchen-b4a3dun'
+ >>> print(idna.decode('xn--knigsgchen-b4a3dun'))
+ königsgäßchen
+
+Transitional processing provides conversions to help transition from
+the older 2003 standard to the current standard. For example, in the
+original IDNA specification, the *LATIN SMALL LETTER SHARP S* (ß) was
+converted into two *LATIN SMALL LETTER S* (ss), whereas in the current
+IDNA specification this conversion is not performed.
+
+.. code-block:: pycon
+
+ >>> idna.encode('Königsgäßchen', uts46=True, transitional=True)
+    b'xn--knigsgsschen-lcb0w'
+
+Implementers should use transitional processing with caution, only in
+rare cases where conversion from legacy labels to current labels must be
+performed (i.e. IDNA implementations that pre-date 2008). For typical
+applications that just need to convert labels, transitional processing
+is unlikely to be beneficial and could produce unexpected incompatible
+results.
+
+``encodings.idna`` Compatibility
+++++++++++++++++++++++++++++++++
+
+Function calls from the Python built-in ``encodings.idna`` module are
+mapped to their IDNA 2008 equivalents using the ``idna.compat`` module.
+Simply substitute the ``import`` clause in your code to refer to the new
+module name.
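+
+For example (a sketch of the substitution this section describes):
+
+.. code-block:: pycon
+
+    >>> from idna.compat import ToASCII, ToUnicode
+    >>> ToASCII('ドメイン.テスト')
+    b'xn--eckwd4c7c.xn--zckzah'
+    >>> ToUnicode(b'xn--eckwd4c7c.xn--zckzah')
+    'ドメイン.テスト'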
+
+Exceptions
+----------
+
+All errors raised during conversion following the specification derive
+from the ``idna.IDNAError`` base class.
+
+More specific exceptions may be generated: ``idna.IDNABidiError``
+when the error reflects an illegal combination of left-to-right and
+right-to-left characters in a label; ``idna.InvalidCodepoint`` when
+a specific codepoint is an illegal character in an IDN label (i.e.
+INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is
+illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ
+but the contextual requirements are not satisfied).
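+
+For example (a sketch; the input mirrors the UTS 46 example above, whose
+capital letters are rejected as invalid codepoints):
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> try:
+    ...     idna.encode('Königsgäßchen')
+    ... except idna.IDNAError as exc:
+    ...     print(type(exc).__name__)
+    InvalidCodepoint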
+
+Building and Diagnostics
+------------------------
+
+The IDNA and UTS 46 functionality relies upon pre-calculated lookup
+tables for performance. These tables are derived from computing against
+eligibility criteria in the respective standards. These tables are
+computed using the command-line script ``tools/idna-data``.
+
+This tool will fetch relevant codepoint data from the Unicode repository
+and perform the required calculations to identify eligibility. There are
+three main modes:
+
+* ``idna-data make-libdata``. Generates ``idnadata.py`` and
+ ``uts46data.py``, the pre-calculated lookup tables used for IDNA and
+ UTS 46 conversions. Implementers who wish to track this library against
+ a different Unicode version may use this tool to manually generate a
+ different version of the ``idnadata.py`` and ``uts46data.py`` files.
+
+* ``idna-data make-table``. Generate a table of the IDNA disposition
+ (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix
+  B.1 of RFC 5892 and the pre-computed tables published by IANA.
+
+* ``idna-data U+0061``. Prints debugging output on the various
+ properties associated with an individual Unicode codepoint (in this
+ case, U+0061), that are used to assess the IDNA and UTS 46 status of a
+ codepoint. This is helpful in debugging or analysis.
+
+The tool accepts a number of arguments, described using ``idna-data
+-h``. Most notably, the ``--version`` argument allows the specification
+of the version of Unicode to be used in computing the table data. For
+example, ``idna-data --version 9.0.0 make-libdata`` will generate
+library data against Unicode 9.0.0.
+
+
+Additional Notes
+----------------
+
+* **Packages**. The latest tagged release version is published in the
+  `Python Package Index <https://pypi.org/project/idna/>`_.
+
+* **Version support**. This library supports Python 3.5 and higher.
+ As this library serves as a low-level toolkit for a variety of
+ applications, many of which strive for broad compatibility with older
+ Python versions, there is no rush to remove older interpreter support.
+ Removing support for older versions should be well justified in that the
+ maintenance burden has become too high.
+
+* **Python 2**. Python 2 is supported by version 2.x of this library.
+ While active development of the version 2.x series has ended, notable
+ issues being corrected may be backported to 2.x. Use "idna<3" in your
+ requirements file if you need this library for a Python 2 application.
+
+* **Testing**. The library has a test suite based on each rule of the
+ IDNA specification, as well as tests that are provided as part of the
+ Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing
+  <https://unicode.org/reports/tr46/>`_.
+
+* **Emoji**. It is an occasional request to support emoji domains in
+ this library. Encoding of symbols like emoji is expressly prohibited by
+ the technical standard IDNA 2008 and emoji domains are broadly phased
+ out across the domain industry due to associated security risks. For
+ now, applications that need to support these non-compliant labels
+ may wish to consider trying the encode/decode operation in this library
+  first, and then falling back to using `encodings.idna`. See `the GitHub
+  project <https://github.com/kjd/idna>`_ for more discussion.
+
diff --git a/Lib/site-packages/idna-3.6.dist-info/RECORD b/Lib/site-packages/idna-3.6.dist-info/RECORD
new file mode 100644
index 0000000..1a02c2a
--- /dev/null
+++ b/Lib/site-packages/idna-3.6.dist-info/RECORD
@@ -0,0 +1,23 @@
+idna-3.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+idna-3.6.dist-info/LICENSE.md,sha256=yy-vDKGMbTh-x8tm8yGTn7puZ-nawJ0xR3y52NP-aJk,1541
+idna-3.6.dist-info/METADATA,sha256=N93B509dkvvkd_Y0E_VxCHPkVkrD6InxoyfXvX4egds,9888
+idna-3.6.dist-info/RECORD,,
+idna-3.6.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+idna-3.6.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849
+idna/__pycache__/__init__.cpython-312.pyc,,
+idna/__pycache__/codec.cpython-312.pyc,,
+idna/__pycache__/compat.cpython-312.pyc,,
+idna/__pycache__/core.cpython-312.pyc,,
+idna/__pycache__/idnadata.cpython-312.pyc,,
+idna/__pycache__/intranges.cpython-312.pyc,,
+idna/__pycache__/package_data.cpython-312.pyc,,
+idna/__pycache__/uts46data.cpython-312.pyc,,
+idna/codec.py,sha256=PS6m-XmdST7Wj7J7ulRMakPDt5EBJyYrT3CPtjh-7t4,3426
+idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321
+idna/core.py,sha256=Bxz9L1rH0N5U-yukGfPuDRTxR2jDUl96NCq1ql3YAUw,12908
+idna/idnadata.py,sha256=9u3Ec_GRrhlcbs7QM3pAZ2ObEQzPIOm99FaVOm91UGg,44351
+idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881
+idna/package_data.py,sha256=y-iv-qJdmHsWVR5FszYwsMo1AQg8qpdU2aU5nT-S2oQ,21
+idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+idna/uts46data.py,sha256=1KuksWqLuccPXm2uyRVkhfiFLNIhM_H2m4azCcnOqEU,206503
diff --git a/Lib/site-packages/idna-3.6.dist-info/REQUESTED b/Lib/site-packages/idna-3.6.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/idna-3.6.dist-info/WHEEL b/Lib/site-packages/idna-3.6.dist-info/WHEEL
new file mode 100644
index 0000000..3b5e64b
--- /dev/null
+++ b/Lib/site-packages/idna-3.6.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.9.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/Lib/site-packages/idna/__init__.py b/Lib/site-packages/idna/__init__.py
new file mode 100644
index 0000000..a40eeaf
--- /dev/null
+++ b/Lib/site-packages/idna/__init__.py
@@ -0,0 +1,44 @@
+from .package_data import __version__
+from .core import (
+ IDNABidiError,
+ IDNAError,
+ InvalidCodepoint,
+ InvalidCodepointContext,
+ alabel,
+ check_bidi,
+ check_hyphen_ok,
+ check_initial_combiner,
+ check_label,
+ check_nfc,
+ decode,
+ encode,
+ ulabel,
+ uts46_remap,
+ valid_contextj,
+ valid_contexto,
+ valid_label_length,
+ valid_string_length,
+)
+from .intranges import intranges_contain
+
+__all__ = [
+ "IDNABidiError",
+ "IDNAError",
+ "InvalidCodepoint",
+ "InvalidCodepointContext",
+ "alabel",
+ "check_bidi",
+ "check_hyphen_ok",
+ "check_initial_combiner",
+ "check_label",
+ "check_nfc",
+ "decode",
+ "encode",
+ "intranges_contain",
+ "ulabel",
+ "uts46_remap",
+ "valid_contextj",
+ "valid_contexto",
+ "valid_label_length",
+ "valid_string_length",
+]
diff --git a/Lib/site-packages/idna/codec.py b/Lib/site-packages/idna/codec.py
new file mode 100644
index 0000000..c855a4d
--- /dev/null
+++ b/Lib/site-packages/idna/codec.py
@@ -0,0 +1,118 @@
+from .core import encode, decode, alabel, ulabel, IDNAError
+import codecs
+import re
+from typing import Any, Tuple, Optional
+
+_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]')
+
+class Codec(codecs.Codec):
+
+ def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]:
+ if errors != 'strict':
+ raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
+
+ if not data:
+ return b"", 0
+
+ return encode(data), len(data)
+
+ def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]:
+ if errors != 'strict':
+ raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
+
+ if not data:
+ return '', 0
+
+ return decode(data), len(data)
+
+class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
+ def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]:
+ if errors != 'strict':
+ raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
+
+ if not data:
+ return b'', 0
+
+ labels = _unicode_dots_re.split(data)
+ trailing_dot = b''
+ if labels:
+ if not labels[-1]:
+ trailing_dot = b'.'
+ del labels[-1]
+ elif not final:
+ # Keep potentially unfinished label until the next call
+ del labels[-1]
+ if labels:
+ trailing_dot = b'.'
+
+ result = []
+ size = 0
+ for label in labels:
+ result.append(alabel(label))
+ if size:
+ size += 1
+ size += len(label)
+
+ # Join with U+002E
+ result_bytes = b'.'.join(result) + trailing_dot
+ size += len(trailing_dot)
+ return result_bytes, size
+
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
+ def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]:
+ if errors != 'strict':
+ raise IDNAError('Unsupported error handling \"{}\"'.format(errors))
+
+ if not data:
+ return ('', 0)
+
+ if not isinstance(data, str):
+ data = str(data, 'ascii')
+
+ labels = _unicode_dots_re.split(data)
+ trailing_dot = ''
+ if labels:
+ if not labels[-1]:
+ trailing_dot = '.'
+ del labels[-1]
+ elif not final:
+ # Keep potentially unfinished label until the next call
+ del labels[-1]
+ if labels:
+ trailing_dot = '.'
+
+ result = []
+ size = 0
+ for label in labels:
+ result.append(ulabel(label))
+ if size:
+ size += 1
+ size += len(label)
+
+ result_str = '.'.join(result) + trailing_dot
+ size += len(trailing_dot)
+ return (result_str, size)
+
+
+class StreamWriter(Codec, codecs.StreamWriter):
+ pass
+
+
+class StreamReader(Codec, codecs.StreamReader):
+ pass
+
+
+def search_function(name: str) -> Optional[codecs.CodecInfo]:
+ if name != 'idna2008':
+ return None
+ return codecs.CodecInfo(
+ name=name,
+ encode=Codec().encode,
+ decode=Codec().decode,
+ incrementalencoder=IncrementalEncoder,
+ incrementaldecoder=IncrementalDecoder,
+ streamwriter=StreamWriter,
+ streamreader=StreamReader,
+ )
+
+codecs.register(search_function)
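Registering `search_function` makes the implementation available under the codec name `'idna2008'` (the only name the function accepts). A minimal usage sketch; importing the module is enough, since registration happens as a side effect:

```python
# Importing idna.codec registers the 'idna2008' codec as a side effect.
import idna.codec  # noqa: F401

# str.encode()/bytes.decode() then route through the IDNA 2008 rules.
assert 'ドメイン.テスト'.encode('idna2008') == b'xn--eckwd4c7c.xn--zckzah'
assert b'xn--eckwd4c7c.xn--zckzah'.decode('idna2008') == 'ドメイン.テスト'
```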
diff --git a/Lib/site-packages/idna/compat.py b/Lib/site-packages/idna/compat.py
new file mode 100644
index 0000000..786e6bd
--- /dev/null
+++ b/Lib/site-packages/idna/compat.py
@@ -0,0 +1,13 @@
+from .core import *
+from .codec import *
+from typing import Any, Union
+
+def ToASCII(label: str) -> bytes:
+ return encode(label)
+
+def ToUnicode(label: Union[bytes, bytearray]) -> str:
+ return decode(label)
+
+def nameprep(s: Any) -> None:
+ raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol')
+
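The compat shim keeps the IDNA 2003-era entry-point names alive on top of the IDNA 2008 implementation, while `nameprep` is deliberately unsupported. For illustration (a sketch, reusing the example domain from above):

```python
# ToASCII/ToUnicode are thin aliases for encode()/decode().
from idna.compat import ToASCII, ToUnicode

assert ToASCII('ドメイン.テスト') == b'xn--eckwd4c7c.xn--zckzah'
assert ToUnicode(b'xn--eckwd4c7c.xn--zckzah') == 'ドメイン.テスト'
```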
diff --git a/Lib/site-packages/idna/core.py b/Lib/site-packages/idna/core.py
new file mode 100644
index 0000000..aaf7d65
--- /dev/null
+++ b/Lib/site-packages/idna/core.py
@@ -0,0 +1,400 @@
+from . import idnadata
+import bisect
+import unicodedata
+import re
+from typing import Union, Optional
+from .intranges import intranges_contain
+
+_virama_combining_class = 9
+_alabel_prefix = b'xn--'
+_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]')
+
+class IDNAError(UnicodeError):
+ """ Base exception for all IDNA-encoding related problems """
+ pass
+
+
+class IDNABidiError(IDNAError):
+ """ Exception when bidirectional requirements are not satisfied """
+ pass
+
+
+class InvalidCodepoint(IDNAError):
+ """ Exception when a disallowed or unallocated codepoint is used """
+ pass
+
+
+class InvalidCodepointContext(IDNAError):
+ """ Exception when the codepoint is not valid in the context it is used """
+ pass
+
+
+def _combining_class(cp: int) -> int:
+ v = unicodedata.combining(chr(cp))
+ if v == 0:
+ if not unicodedata.name(chr(cp)):
+ raise ValueError('Unknown character in unicodedata')
+ return v
+
+def _is_script(cp: str, script: str) -> bool:
+ return intranges_contain(ord(cp), idnadata.scripts[script])
+
+def _punycode(s: str) -> bytes:
+ return s.encode('punycode')
+
+def _unot(s: int) -> str:
+ return 'U+{:04X}'.format(s)
+
+
+def valid_label_length(label: Union[bytes, str]) -> bool:
+ if len(label) > 63:
+ return False
+ return True
+
+
+def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool:
+ if len(label) > (254 if trailing_dot else 253):
+ return False
+ return True
+
+
+def check_bidi(label: str, check_ltr: bool = False) -> bool:
+ # Bidi rules should only be applied if string contains RTL characters
+ bidi_label = False
+ for (idx, cp) in enumerate(label, 1):
+ direction = unicodedata.bidirectional(cp)
+ if direction == '':
+ # String likely comes from a newer version of Unicode
+ raise IDNABidiError('Unknown directionality in label {} at position {}'.format(repr(label), idx))
+ if direction in ['R', 'AL', 'AN']:
+ bidi_label = True
+ if not bidi_label and not check_ltr:
+ return True
+
+ # Bidi rule 1
+ direction = unicodedata.bidirectional(label[0])
+ if direction in ['R', 'AL']:
+ rtl = True
+ elif direction == 'L':
+ rtl = False
+ else:
+ raise IDNABidiError('First codepoint in label {} must be directionality L, R or AL'.format(repr(label)))
+
+ valid_ending = False
+ number_type = None # type: Optional[str]
+ for (idx, cp) in enumerate(label, 1):
+ direction = unicodedata.bidirectional(cp)
+
+ if rtl:
+ # Bidi rule 2
+ if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
+ raise IDNABidiError('Invalid direction for codepoint at position {} in a right-to-left label'.format(idx))
+ # Bidi rule 3
+ if direction in ['R', 'AL', 'EN', 'AN']:
+ valid_ending = True
+ elif direction != 'NSM':
+ valid_ending = False
+ # Bidi rule 4
+ if direction in ['AN', 'EN']:
+ if not number_type:
+ number_type = direction
+ else:
+ if number_type != direction:
+ raise IDNABidiError('Can not mix numeral types in a right-to-left label')
+ else:
+ # Bidi rule 5
+ if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
+ raise IDNABidiError('Invalid direction for codepoint at position {} in a left-to-right label'.format(idx))
+ # Bidi rule 6
+ if direction in ['L', 'EN']:
+ valid_ending = True
+ elif direction != 'NSM':
+ valid_ending = False
+
+ if not valid_ending:
+ raise IDNABidiError('Label ends with illegal codepoint directionality')
+
+ return True
+
+
+def check_initial_combiner(label: str) -> bool:
+ if unicodedata.category(label[0])[0] == 'M':
+ raise IDNAError('Label begins with an illegal combining character')
+ return True
+
+
+def check_hyphen_ok(label: str) -> bool:
+ if label[2:4] == '--':
+ raise IDNAError('Label has disallowed hyphens in 3rd and 4th position')
+ if label[0] == '-' or label[-1] == '-':
+ raise IDNAError('Label must not start or end with a hyphen')
+ return True
+
+
+def check_nfc(label: str) -> None:
+ if unicodedata.normalize('NFC', label) != label:
+ raise IDNAError('Label must be in Normalization Form C')
+
+
+def valid_contextj(label: str, pos: int) -> bool:
+ cp_value = ord(label[pos])
+
+ if cp_value == 0x200c:
+
+ if pos > 0:
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+ return True
+
+ ok = False
+ for i in range(pos-1, -1, -1):
+ joining_type = idnadata.joining_types.get(ord(label[i]))
+ if joining_type == ord('T'):
+ continue
+ if joining_type in [ord('L'), ord('D')]:
+ ok = True
+ break
+
+ if not ok:
+ return False
+
+ ok = False
+ for i in range(pos+1, len(label)):
+ joining_type = idnadata.joining_types.get(ord(label[i]))
+ if joining_type == ord('T'):
+ continue
+ if joining_type in [ord('R'), ord('D')]:
+ ok = True
+ break
+ return ok
+
+ if cp_value == 0x200d:
+
+ if pos > 0:
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+ return True
+ return False
+
+ else:
+
+ return False
+
+
+def valid_contexto(label: str, pos: int, exception: bool = False) -> bool:
+ cp_value = ord(label[pos])
+
+ if cp_value == 0x00b7:
+ if 0 < pos < len(label)-1:
+ if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c:
+ return True
+ return False
+
+ elif cp_value == 0x0375:
+ if pos < len(label)-1 and len(label) > 1:
+ return _is_script(label[pos + 1], 'Greek')
+ return False
+
+ elif cp_value == 0x05f3 or cp_value == 0x05f4:
+ if pos > 0:
+ return _is_script(label[pos - 1], 'Hebrew')
+ return False
+
+ elif cp_value == 0x30fb:
+ for cp in label:
+ if cp == '\u30fb':
+ continue
+ if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'):
+ return True
+ return False
+
+ elif 0x660 <= cp_value <= 0x669:
+ for cp in label:
+ if 0x6f0 <= ord(cp) <= 0x06f9:
+ return False
+ return True
+
+ elif 0x6f0 <= cp_value <= 0x6f9:
+ for cp in label:
+ if 0x660 <= ord(cp) <= 0x0669:
+ return False
+ return True
+
+ return False
+
+
+def check_label(label: Union[str, bytes, bytearray]) -> None:
+ if isinstance(label, (bytes, bytearray)):
+ label = label.decode('utf-8')
+ if len(label) == 0:
+ raise IDNAError('Empty Label')
+
+ check_nfc(label)
+ check_hyphen_ok(label)
+ check_initial_combiner(label)
+
+ for (pos, cp) in enumerate(label):
+ cp_value = ord(cp)
+ if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']):
+ continue
+ elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']):
+ try:
+ if not valid_contextj(label, pos):
+ raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format(
+ _unot(cp_value), pos+1, repr(label)))
+ except ValueError:
+ raise IDNAError('Unknown codepoint adjacent to joiner {} at position {} in {}'.format(
+ _unot(cp_value), pos+1, repr(label)))
+ elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']):
+ if not valid_contexto(label, pos):
+ raise InvalidCodepointContext('Codepoint {} not allowed at position {} in {}'.format(_unot(cp_value), pos+1, repr(label)))
+ else:
+ raise InvalidCodepoint('Codepoint {} at position {} of {} not allowed'.format(_unot(cp_value), pos+1, repr(label)))
+
+ check_bidi(label)
+
+
+def alabel(label: str) -> bytes:
+ try:
+ label_bytes = label.encode('ascii')
+ ulabel(label_bytes)
+ if not valid_label_length(label_bytes):
+ raise IDNAError('Label too long')
+ return label_bytes
+ except UnicodeEncodeError:
+ pass
+
+ if not label:
+ raise IDNAError('No Input')
+
+ label = str(label)
+ check_label(label)
+ label_bytes = _punycode(label)
+ label_bytes = _alabel_prefix + label_bytes
+
+ if not valid_label_length(label_bytes):
+ raise IDNAError('Label too long')
+
+ return label_bytes
+
+
+def ulabel(label: Union[str, bytes, bytearray]) -> str:
+ if not isinstance(label, (bytes, bytearray)):
+ try:
+ label_bytes = label.encode('ascii')
+ except UnicodeEncodeError:
+ check_label(label)
+ return label
+ else:
+ label_bytes = label
+
+ label_bytes = label_bytes.lower()
+ if label_bytes.startswith(_alabel_prefix):
+ label_bytes = label_bytes[len(_alabel_prefix):]
+ if not label_bytes:
+ raise IDNAError('Malformed A-label, no Punycode eligible content found')
+ if label_bytes.decode('ascii')[-1] == '-':
+ raise IDNAError('A-label must not end with a hyphen')
+ else:
+ check_label(label_bytes)
+ return label_bytes.decode('ascii')
+
+ try:
+ label = label_bytes.decode('punycode')
+ except UnicodeError:
+ raise IDNAError('Invalid A-label')
+ check_label(label)
+ return label
+
+
+def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str:
+ """Re-map the characters in the string according to UTS46 processing."""
+ from .uts46data import uts46data
+ output = ''
+
+ for pos, char in enumerate(domain):
+ code_point = ord(char)
+ try:
+ uts46row = uts46data[code_point if code_point < 256 else
+ bisect.bisect_left(uts46data, (code_point, 'Z')) - 1]
+ status = uts46row[1]
+ replacement = None # type: Optional[str]
+ if len(uts46row) == 3:
+ replacement = uts46row[2]
+ if (status == 'V' or
+ (status == 'D' and not transitional) or
+ (status == '3' and not std3_rules and replacement is None)):
+ output += char
+ elif replacement is not None and (status == 'M' or
+ (status == '3' and not std3_rules) or
+ (status == 'D' and transitional)):
+ output += replacement
+ elif status != 'I':
+ raise IndexError()
+ except IndexError:
+ raise InvalidCodepoint(
+ 'Codepoint {} not allowed at position {} in {}'.format(
+ _unot(code_point), pos + 1, repr(domain)))
+
+ return unicodedata.normalize('NFC', output)
+
+
+def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes:
+ if not isinstance(s, str):
+ try:
+ s = str(s, 'ascii')
+ except UnicodeDecodeError:
+            raise IDNAError('A unicode string should be passed to this function rather than a byte string.')
+ if uts46:
+ s = uts46_remap(s, std3_rules, transitional)
+ trailing_dot = False
+ result = []
+ if strict:
+ labels = s.split('.')
+ else:
+ labels = _unicode_dots_re.split(s)
+ if not labels or labels == ['']:
+ raise IDNAError('Empty domain')
+ if labels[-1] == '':
+ del labels[-1]
+ trailing_dot = True
+ for label in labels:
+ s = alabel(label)
+ if s:
+ result.append(s)
+ else:
+ raise IDNAError('Empty label')
+ if trailing_dot:
+ result.append(b'')
+ s = b'.'.join(result)
+ if not valid_string_length(s, trailing_dot):
+ raise IDNAError('Domain too long')
+ return s
+
+
+def decode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False) -> str:
+ try:
+ if not isinstance(s, str):
+ s = str(s, 'ascii')
+ except UnicodeDecodeError:
+ raise IDNAError('Invalid ASCII in A-label')
+ if uts46:
+ s = uts46_remap(s, std3_rules, False)
+ trailing_dot = False
+ result = []
+ if not strict:
+ labels = _unicode_dots_re.split(s)
+ else:
+ labels = s.split('.')
+ if not labels or labels == ['']:
+ raise IDNAError('Empty domain')
+ if not labels[-1]:
+ del labels[-1]
+ trailing_dot = True
+ for label in labels:
+ s = ulabel(label)
+ if s:
+ result.append(s)
+ else:
+ raise IDNAError('Empty label')
+ if trailing_dot:
+ result.append('')
+ return '.'.join(result)
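`core.py` carries the full IDNA 2008 pipeline: per-label validation (`check_label` plus the bidi and context rules), A-label/U-label conversion (`alabel`/`ulabel`), and optional UTS 46 pre-mapping (`uts46_remap`, where status `'V'` means valid, `'M'` mapped, `'D'` deviation, `'3'` STD3-restricted, `'I'` ignored, and anything else disallowed). A hedged sketch of how the `uts46` flag changes `encode()` behaviour, with expected outputs taken from the library's own documentation:

```python
import idna

# Strict IDNA 2008: uppercase 'K' is a disallowed codepoint, so this raises.
try:
    idna.encode('Königsgäßchen')
except idna.InvalidCodepoint:
    pass

# With UTS 46 pre-mapping the input is case-folded first; 'ß' survives.
assert idna.encode('Königsgäßchen', uts46=True) == b'xn--knigsgchen-b4a3dun'

# Transitional processing additionally maps 'ß' to 'ss' (IDNA 2003 behaviour).
assert idna.encode('Königsgäßchen', uts46=True,
                   transitional=True) == b'xn--knigsgsschen-lcb0w'
```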
diff --git a/Lib/site-packages/idna/idnadata.py b/Lib/site-packages/idna/idnadata.py
new file mode 100644
index 0000000..5cd05d9
--- /dev/null
+++ b/Lib/site-packages/idna/idnadata.py
@@ -0,0 +1,2150 @@
+# This file is automatically generated by tools/idna-data
+
+__version__ = '15.1.0'
+scripts = {
+ 'Greek': (
+ 0x37000000374,
+ 0x37500000378,
+ 0x37a0000037e,
+ 0x37f00000380,
+ 0x38400000385,
+ 0x38600000387,
+ 0x3880000038b,
+ 0x38c0000038d,
+ 0x38e000003a2,
+ 0x3a3000003e2,
+ 0x3f000000400,
+ 0x1d2600001d2b,
+ 0x1d5d00001d62,
+ 0x1d6600001d6b,
+ 0x1dbf00001dc0,
+ 0x1f0000001f16,
+ 0x1f1800001f1e,
+ 0x1f2000001f46,
+ 0x1f4800001f4e,
+ 0x1f5000001f58,
+ 0x1f5900001f5a,
+ 0x1f5b00001f5c,
+ 0x1f5d00001f5e,
+ 0x1f5f00001f7e,
+ 0x1f8000001fb5,
+ 0x1fb600001fc5,
+ 0x1fc600001fd4,
+ 0x1fd600001fdc,
+ 0x1fdd00001ff0,
+ 0x1ff200001ff5,
+ 0x1ff600001fff,
+ 0x212600002127,
+ 0xab650000ab66,
+ 0x101400001018f,
+ 0x101a0000101a1,
+ 0x1d2000001d246,
+ ),
+ 'Han': (
+ 0x2e8000002e9a,
+ 0x2e9b00002ef4,
+ 0x2f0000002fd6,
+ 0x300500003006,
+ 0x300700003008,
+ 0x30210000302a,
+ 0x30380000303c,
+ 0x340000004dc0,
+ 0x4e000000a000,
+ 0xf9000000fa6e,
+ 0xfa700000fada,
+ 0x16fe200016fe4,
+ 0x16ff000016ff2,
+ 0x200000002a6e0,
+ 0x2a7000002b73a,
+ 0x2b7400002b81e,
+ 0x2b8200002cea2,
+ 0x2ceb00002ebe1,
+ 0x2ebf00002ee5e,
+ 0x2f8000002fa1e,
+ 0x300000003134b,
+ 0x31350000323b0,
+ ),
+ 'Hebrew': (
+ 0x591000005c8,
+ 0x5d0000005eb,
+ 0x5ef000005f5,
+ 0xfb1d0000fb37,
+ 0xfb380000fb3d,
+ 0xfb3e0000fb3f,
+ 0xfb400000fb42,
+ 0xfb430000fb45,
+ 0xfb460000fb50,
+ ),
+ 'Hiragana': (
+ 0x304100003097,
+ 0x309d000030a0,
+ 0x1b0010001b120,
+ 0x1b1320001b133,
+ 0x1b1500001b153,
+ 0x1f2000001f201,
+ ),
+ 'Katakana': (
+ 0x30a1000030fb,
+ 0x30fd00003100,
+ 0x31f000003200,
+ 0x32d0000032ff,
+ 0x330000003358,
+ 0xff660000ff70,
+ 0xff710000ff9e,
+ 0x1aff00001aff4,
+ 0x1aff50001affc,
+ 0x1affd0001afff,
+ 0x1b0000001b001,
+ 0x1b1200001b123,
+ 0x1b1550001b156,
+ 0x1b1640001b168,
+ ),
+}
+joining_types = {
+ 0x600: 85,
+ 0x601: 85,
+ 0x602: 85,
+ 0x603: 85,
+ 0x604: 85,
+ 0x605: 85,
+ 0x608: 85,
+ 0x60b: 85,
+ 0x620: 68,
+ 0x621: 85,
+ 0x622: 82,
+ 0x623: 82,
+ 0x624: 82,
+ 0x625: 82,
+ 0x626: 68,
+ 0x627: 82,
+ 0x628: 68,
+ 0x629: 82,
+ 0x62a: 68,
+ 0x62b: 68,
+ 0x62c: 68,
+ 0x62d: 68,
+ 0x62e: 68,
+ 0x62f: 82,
+ 0x630: 82,
+ 0x631: 82,
+ 0x632: 82,
+ 0x633: 68,
+ 0x634: 68,
+ 0x635: 68,
+ 0x636: 68,
+ 0x637: 68,
+ 0x638: 68,
+ 0x639: 68,
+ 0x63a: 68,
+ 0x63b: 68,
+ 0x63c: 68,
+ 0x63d: 68,
+ 0x63e: 68,
+ 0x63f: 68,
+ 0x640: 67,
+ 0x641: 68,
+ 0x642: 68,
+ 0x643: 68,
+ 0x644: 68,
+ 0x645: 68,
+ 0x646: 68,
+ 0x647: 68,
+ 0x648: 82,
+ 0x649: 68,
+ 0x64a: 68,
+ 0x66e: 68,
+ 0x66f: 68,
+ 0x671: 82,
+ 0x672: 82,
+ 0x673: 82,
+ 0x674: 85,
+ 0x675: 82,
+ 0x676: 82,
+ 0x677: 82,
+ 0x678: 68,
+ 0x679: 68,
+ 0x67a: 68,
+ 0x67b: 68,
+ 0x67c: 68,
+ 0x67d: 68,
+ 0x67e: 68,
+ 0x67f: 68,
+ 0x680: 68,
+ 0x681: 68,
+ 0x682: 68,
+ 0x683: 68,
+ 0x684: 68,
+ 0x685: 68,
+ 0x686: 68,
+ 0x687: 68,
+ 0x688: 82,
+ 0x689: 82,
+ 0x68a: 82,
+ 0x68b: 82,
+ 0x68c: 82,
+ 0x68d: 82,
+ 0x68e: 82,
+ 0x68f: 82,
+ 0x690: 82,
+ 0x691: 82,
+ 0x692: 82,
+ 0x693: 82,
+ 0x694: 82,
+ 0x695: 82,
+ 0x696: 82,
+ 0x697: 82,
+ 0x698: 82,
+ 0x699: 82,
+ 0x69a: 68,
+ 0x69b: 68,
+ 0x69c: 68,
+ 0x69d: 68,
+ 0x69e: 68,
+ 0x69f: 68,
+ 0x6a0: 68,
+ 0x6a1: 68,
+ 0x6a2: 68,
+ 0x6a3: 68,
+ 0x6a4: 68,
+ 0x6a5: 68,
+ 0x6a6: 68,
+ 0x6a7: 68,
+ 0x6a8: 68,
+ 0x6a9: 68,
+ 0x6aa: 68,
+ 0x6ab: 68,
+ 0x6ac: 68,
+ 0x6ad: 68,
+ 0x6ae: 68,
+ 0x6af: 68,
+ 0x6b0: 68,
+ 0x6b1: 68,
+ 0x6b2: 68,
+ 0x6b3: 68,
+ 0x6b4: 68,
+ 0x6b5: 68,
+ 0x6b6: 68,
+ 0x6b7: 68,
+ 0x6b8: 68,
+ 0x6b9: 68,
+ 0x6ba: 68,
+ 0x6bb: 68,
+ 0x6bc: 68,
+ 0x6bd: 68,
+ 0x6be: 68,
+ 0x6bf: 68,
+ 0x6c0: 82,
+ 0x6c1: 68,
+ 0x6c2: 68,
+ 0x6c3: 82,
+ 0x6c4: 82,
+ 0x6c5: 82,
+ 0x6c6: 82,
+ 0x6c7: 82,
+ 0x6c8: 82,
+ 0x6c9: 82,
+ 0x6ca: 82,
+ 0x6cb: 82,
+ 0x6cc: 68,
+ 0x6cd: 82,
+ 0x6ce: 68,
+ 0x6cf: 82,
+ 0x6d0: 68,
+ 0x6d1: 68,
+ 0x6d2: 82,
+ 0x6d3: 82,
+ 0x6d5: 82,
+ 0x6dd: 85,
+ 0x6ee: 82,
+ 0x6ef: 82,
+ 0x6fa: 68,
+ 0x6fb: 68,
+ 0x6fc: 68,
+ 0x6ff: 68,
+ 0x70f: 84,
+ 0x710: 82,
+ 0x712: 68,
+ 0x713: 68,
+ 0x714: 68,
+ 0x715: 82,
+ 0x716: 82,
+ 0x717: 82,
+ 0x718: 82,
+ 0x719: 82,
+ 0x71a: 68,
+ 0x71b: 68,
+ 0x71c: 68,
+ 0x71d: 68,
+ 0x71e: 82,
+ 0x71f: 68,
+ 0x720: 68,
+ 0x721: 68,
+ 0x722: 68,
+ 0x723: 68,
+ 0x724: 68,
+ 0x725: 68,
+ 0x726: 68,
+ 0x727: 68,
+ 0x728: 82,
+ 0x729: 68,
+ 0x72a: 82,
+ 0x72b: 68,
+ 0x72c: 82,
+ 0x72d: 68,
+ 0x72e: 68,
+ 0x72f: 82,
+ 0x74d: 82,
+ 0x74e: 68,
+ 0x74f: 68,
+ 0x750: 68,
+ 0x751: 68,
+ 0x752: 68,
+ 0x753: 68,
+ 0x754: 68,
+ 0x755: 68,
+ 0x756: 68,
+ 0x757: 68,
+ 0x758: 68,
+ 0x759: 82,
+ 0x75a: 82,
+ 0x75b: 82,
+ 0x75c: 68,
+ 0x75d: 68,
+ 0x75e: 68,
+ 0x75f: 68,
+ 0x760: 68,
+ 0x761: 68,
+ 0x762: 68,
+ 0x763: 68,
+ 0x764: 68,
+ 0x765: 68,
+ 0x766: 68,
+ 0x767: 68,
+ 0x768: 68,
+ 0x769: 68,
+ 0x76a: 68,
+ 0x76b: 82,
+ 0x76c: 82,
+ 0x76d: 68,
+ 0x76e: 68,
+ 0x76f: 68,
+ 0x770: 68,
+ 0x771: 82,
+ 0x772: 68,
+ 0x773: 82,
+ 0x774: 82,
+ 0x775: 68,
+ 0x776: 68,
+ 0x777: 68,
+ 0x778: 82,
+ 0x779: 82,
+ 0x77a: 68,
+ 0x77b: 68,
+ 0x77c: 68,
+ 0x77d: 68,
+ 0x77e: 68,
+ 0x77f: 68,
+ 0x7ca: 68,
+ 0x7cb: 68,
+ 0x7cc: 68,
+ 0x7cd: 68,
+ 0x7ce: 68,
+ 0x7cf: 68,
+ 0x7d0: 68,
+ 0x7d1: 68,
+ 0x7d2: 68,
+ 0x7d3: 68,
+ 0x7d4: 68,
+ 0x7d5: 68,
+ 0x7d6: 68,
+ 0x7d7: 68,
+ 0x7d8: 68,
+ 0x7d9: 68,
+ 0x7da: 68,
+ 0x7db: 68,
+ 0x7dc: 68,
+ 0x7dd: 68,
+ 0x7de: 68,
+ 0x7df: 68,
+ 0x7e0: 68,
+ 0x7e1: 68,
+ 0x7e2: 68,
+ 0x7e3: 68,
+ 0x7e4: 68,
+ 0x7e5: 68,
+ 0x7e6: 68,
+ 0x7e7: 68,
+ 0x7e8: 68,
+ 0x7e9: 68,
+ 0x7ea: 68,
+ 0x7fa: 67,
+ 0x840: 82,
+ 0x841: 68,
+ 0x842: 68,
+ 0x843: 68,
+ 0x844: 68,
+ 0x845: 68,
+ 0x846: 82,
+ 0x847: 82,
+ 0x848: 68,
+ 0x849: 82,
+ 0x84a: 68,
+ 0x84b: 68,
+ 0x84c: 68,
+ 0x84d: 68,
+ 0x84e: 68,
+ 0x84f: 68,
+ 0x850: 68,
+ 0x851: 68,
+ 0x852: 68,
+ 0x853: 68,
+ 0x854: 82,
+ 0x855: 68,
+ 0x856: 82,
+ 0x857: 82,
+ 0x858: 82,
+ 0x860: 68,
+ 0x861: 85,
+ 0x862: 68,
+ 0x863: 68,
+ 0x864: 68,
+ 0x865: 68,
+ 0x866: 85,
+ 0x867: 82,
+ 0x868: 68,
+ 0x869: 82,
+ 0x86a: 82,
+ 0x870: 82,
+ 0x871: 82,
+ 0x872: 82,
+ 0x873: 82,
+ 0x874: 82,
+ 0x875: 82,
+ 0x876: 82,
+ 0x877: 82,
+ 0x878: 82,
+ 0x879: 82,
+ 0x87a: 82,
+ 0x87b: 82,
+ 0x87c: 82,
+ 0x87d: 82,
+ 0x87e: 82,
+ 0x87f: 82,
+ 0x880: 82,
+ 0x881: 82,
+ 0x882: 82,
+ 0x883: 67,
+ 0x884: 67,
+ 0x885: 67,
+ 0x886: 68,
+ 0x887: 85,
+ 0x888: 85,
+ 0x889: 68,
+ 0x88a: 68,
+ 0x88b: 68,
+ 0x88c: 68,
+ 0x88d: 68,
+ 0x88e: 82,
+ 0x890: 85,
+ 0x891: 85,
+ 0x8a0: 68,
+ 0x8a1: 68,
+ 0x8a2: 68,
+ 0x8a3: 68,
+ 0x8a4: 68,
+ 0x8a5: 68,
+ 0x8a6: 68,
+ 0x8a7: 68,
+ 0x8a8: 68,
+ 0x8a9: 68,
+ 0x8aa: 82,
+ 0x8ab: 82,
+ 0x8ac: 82,
+ 0x8ad: 85,
+ 0x8ae: 82,
+ 0x8af: 68,
+ 0x8b0: 68,
+ 0x8b1: 82,
+ 0x8b2: 82,
+ 0x8b3: 68,
+ 0x8b4: 68,
+ 0x8b5: 68,
+ 0x8b6: 68,
+ 0x8b7: 68,
+ 0x8b8: 68,
+ 0x8b9: 82,
+ 0x8ba: 68,
+ 0x8bb: 68,
+ 0x8bc: 68,
+ 0x8bd: 68,
+ 0x8be: 68,
+ 0x8bf: 68,
+ 0x8c0: 68,
+ 0x8c1: 68,
+ 0x8c2: 68,
+ 0x8c3: 68,
+ 0x8c4: 68,
+ 0x8c5: 68,
+ 0x8c6: 68,
+ 0x8c7: 68,
+ 0x8c8: 68,
+ 0x8e2: 85,
+ 0x1806: 85,
+ 0x1807: 68,
+ 0x180a: 67,
+ 0x180e: 85,
+ 0x1820: 68,
+ 0x1821: 68,
+ 0x1822: 68,
+ 0x1823: 68,
+ 0x1824: 68,
+ 0x1825: 68,
+ 0x1826: 68,
+ 0x1827: 68,
+ 0x1828: 68,
+ 0x1829: 68,
+ 0x182a: 68,
+ 0x182b: 68,
+ 0x182c: 68,
+ 0x182d: 68,
+ 0x182e: 68,
+ 0x182f: 68,
+ 0x1830: 68,
+ 0x1831: 68,
+ 0x1832: 68,
+ 0x1833: 68,
+ 0x1834: 68,
+ 0x1835: 68,
+ 0x1836: 68,
+ 0x1837: 68,
+ 0x1838: 68,
+ 0x1839: 68,
+ 0x183a: 68,
+ 0x183b: 68,
+ 0x183c: 68,
+ 0x183d: 68,
+ 0x183e: 68,
+ 0x183f: 68,
+ 0x1840: 68,
+ 0x1841: 68,
+ 0x1842: 68,
+ 0x1843: 68,
+ 0x1844: 68,
+ 0x1845: 68,
+ 0x1846: 68,
+ 0x1847: 68,
+ 0x1848: 68,
+ 0x1849: 68,
+ 0x184a: 68,
+ 0x184b: 68,
+ 0x184c: 68,
+ 0x184d: 68,
+ 0x184e: 68,
+ 0x184f: 68,
+ 0x1850: 68,
+ 0x1851: 68,
+ 0x1852: 68,
+ 0x1853: 68,
+ 0x1854: 68,
+ 0x1855: 68,
+ 0x1856: 68,
+ 0x1857: 68,
+ 0x1858: 68,
+ 0x1859: 68,
+ 0x185a: 68,
+ 0x185b: 68,
+ 0x185c: 68,
+ 0x185d: 68,
+ 0x185e: 68,
+ 0x185f: 68,
+ 0x1860: 68,
+ 0x1861: 68,
+ 0x1862: 68,
+ 0x1863: 68,
+ 0x1864: 68,
+ 0x1865: 68,
+ 0x1866: 68,
+ 0x1867: 68,
+ 0x1868: 68,
+ 0x1869: 68,
+ 0x186a: 68,
+ 0x186b: 68,
+ 0x186c: 68,
+ 0x186d: 68,
+ 0x186e: 68,
+ 0x186f: 68,
+ 0x1870: 68,
+ 0x1871: 68,
+ 0x1872: 68,
+ 0x1873: 68,
+ 0x1874: 68,
+ 0x1875: 68,
+ 0x1876: 68,
+ 0x1877: 68,
+ 0x1878: 68,
+ 0x1880: 85,
+ 0x1881: 85,
+ 0x1882: 85,
+ 0x1883: 85,
+ 0x1884: 85,
+ 0x1885: 84,
+ 0x1886: 84,
+ 0x1887: 68,
+ 0x1888: 68,
+ 0x1889: 68,
+ 0x188a: 68,
+ 0x188b: 68,
+ 0x188c: 68,
+ 0x188d: 68,
+ 0x188e: 68,
+ 0x188f: 68,
+ 0x1890: 68,
+ 0x1891: 68,
+ 0x1892: 68,
+ 0x1893: 68,
+ 0x1894: 68,
+ 0x1895: 68,
+ 0x1896: 68,
+ 0x1897: 68,
+ 0x1898: 68,
+ 0x1899: 68,
+ 0x189a: 68,
+ 0x189b: 68,
+ 0x189c: 68,
+ 0x189d: 68,
+ 0x189e: 68,
+ 0x189f: 68,
+ 0x18a0: 68,
+ 0x18a1: 68,
+ 0x18a2: 68,
+ 0x18a3: 68,
+ 0x18a4: 68,
+ 0x18a5: 68,
+ 0x18a6: 68,
+ 0x18a7: 68,
+ 0x18a8: 68,
+ 0x18aa: 68,
+ 0x200c: 85,
+ 0x200d: 67,
+ 0x202f: 85,
+ 0x2066: 85,
+ 0x2067: 85,
+ 0x2068: 85,
+ 0x2069: 85,
+ 0xa840: 68,
+ 0xa841: 68,
+ 0xa842: 68,
+ 0xa843: 68,
+ 0xa844: 68,
+ 0xa845: 68,
+ 0xa846: 68,
+ 0xa847: 68,
+ 0xa848: 68,
+ 0xa849: 68,
+ 0xa84a: 68,
+ 0xa84b: 68,
+ 0xa84c: 68,
+ 0xa84d: 68,
+ 0xa84e: 68,
+ 0xa84f: 68,
+ 0xa850: 68,
+ 0xa851: 68,
+ 0xa852: 68,
+ 0xa853: 68,
+ 0xa854: 68,
+ 0xa855: 68,
+ 0xa856: 68,
+ 0xa857: 68,
+ 0xa858: 68,
+ 0xa859: 68,
+ 0xa85a: 68,
+ 0xa85b: 68,
+ 0xa85c: 68,
+ 0xa85d: 68,
+ 0xa85e: 68,
+ 0xa85f: 68,
+ 0xa860: 68,
+ 0xa861: 68,
+ 0xa862: 68,
+ 0xa863: 68,
+ 0xa864: 68,
+ 0xa865: 68,
+ 0xa866: 68,
+ 0xa867: 68,
+ 0xa868: 68,
+ 0xa869: 68,
+ 0xa86a: 68,
+ 0xa86b: 68,
+ 0xa86c: 68,
+ 0xa86d: 68,
+ 0xa86e: 68,
+ 0xa86f: 68,
+ 0xa870: 68,
+ 0xa871: 68,
+ 0xa872: 76,
+ 0xa873: 85,
+ 0x10ac0: 68,
+ 0x10ac1: 68,
+ 0x10ac2: 68,
+ 0x10ac3: 68,
+ 0x10ac4: 68,
+ 0x10ac5: 82,
+ 0x10ac6: 85,
+ 0x10ac7: 82,
+ 0x10ac8: 85,
+ 0x10ac9: 82,
+ 0x10aca: 82,
+ 0x10acb: 85,
+ 0x10acc: 85,
+ 0x10acd: 76,
+ 0x10ace: 82,
+ 0x10acf: 82,
+ 0x10ad0: 82,
+ 0x10ad1: 82,
+ 0x10ad2: 82,
+ 0x10ad3: 68,
+ 0x10ad4: 68,
+ 0x10ad5: 68,
+ 0x10ad6: 68,
+ 0x10ad7: 76,
+ 0x10ad8: 68,
+ 0x10ad9: 68,
+ 0x10ada: 68,
+ 0x10adb: 68,
+ 0x10adc: 68,
+ 0x10add: 82,
+ 0x10ade: 68,
+ 0x10adf: 68,
+ 0x10ae0: 68,
+ 0x10ae1: 82,
+ 0x10ae2: 85,
+ 0x10ae3: 85,
+ 0x10ae4: 82,
+ 0x10aeb: 68,
+ 0x10aec: 68,
+ 0x10aed: 68,
+ 0x10aee: 68,
+ 0x10aef: 82,
+ 0x10b80: 68,
+ 0x10b81: 82,
+ 0x10b82: 68,
+ 0x10b83: 82,
+ 0x10b84: 82,
+ 0x10b85: 82,
+ 0x10b86: 68,
+ 0x10b87: 68,
+ 0x10b88: 68,
+ 0x10b89: 82,
+ 0x10b8a: 68,
+ 0x10b8b: 68,
+ 0x10b8c: 82,
+ 0x10b8d: 68,
+ 0x10b8e: 82,
+ 0x10b8f: 82,
+ 0x10b90: 68,
+ 0x10b91: 82,
+ 0x10ba9: 82,
+ 0x10baa: 82,
+ 0x10bab: 82,
+ 0x10bac: 82,
+ 0x10bad: 68,
+ 0x10bae: 68,
+ 0x10baf: 85,
+ 0x10d00: 76,
+ 0x10d01: 68,
+ 0x10d02: 68,
+ 0x10d03: 68,
+ 0x10d04: 68,
+ 0x10d05: 68,
+ 0x10d06: 68,
+ 0x10d07: 68,
+ 0x10d08: 68,
+ 0x10d09: 68,
+ 0x10d0a: 68,
+ 0x10d0b: 68,
+ 0x10d0c: 68,
+ 0x10d0d: 68,
+ 0x10d0e: 68,
+ 0x10d0f: 68,
+ 0x10d10: 68,
+ 0x10d11: 68,
+ 0x10d12: 68,
+ 0x10d13: 68,
+ 0x10d14: 68,
+ 0x10d15: 68,
+ 0x10d16: 68,
+ 0x10d17: 68,
+ 0x10d18: 68,
+ 0x10d19: 68,
+ 0x10d1a: 68,
+ 0x10d1b: 68,
+ 0x10d1c: 68,
+ 0x10d1d: 68,
+ 0x10d1e: 68,
+ 0x10d1f: 68,
+ 0x10d20: 68,
+ 0x10d21: 68,
+ 0x10d22: 82,
+ 0x10d23: 68,
+ 0x10f30: 68,
+ 0x10f31: 68,
+ 0x10f32: 68,
+ 0x10f33: 82,
+ 0x10f34: 68,
+ 0x10f35: 68,
+ 0x10f36: 68,
+ 0x10f37: 68,
+ 0x10f38: 68,
+ 0x10f39: 68,
+ 0x10f3a: 68,
+ 0x10f3b: 68,
+ 0x10f3c: 68,
+ 0x10f3d: 68,
+ 0x10f3e: 68,
+ 0x10f3f: 68,
+ 0x10f40: 68,
+ 0x10f41: 68,
+ 0x10f42: 68,
+ 0x10f43: 68,
+ 0x10f44: 68,
+ 0x10f45: 85,
+ 0x10f51: 68,
+ 0x10f52: 68,
+ 0x10f53: 68,
+ 0x10f54: 82,
+ 0x10f70: 68,
+ 0x10f71: 68,
+ 0x10f72: 68,
+ 0x10f73: 68,
+ 0x10f74: 82,
+ 0x10f75: 82,
+ 0x10f76: 68,
+ 0x10f77: 68,
+ 0x10f78: 68,
+ 0x10f79: 68,
+ 0x10f7a: 68,
+ 0x10f7b: 68,
+ 0x10f7c: 68,
+ 0x10f7d: 68,
+ 0x10f7e: 68,
+ 0x10f7f: 68,
+ 0x10f80: 68,
+ 0x10f81: 68,
+ 0x10fb0: 68,
+ 0x10fb1: 85,
+ 0x10fb2: 68,
+ 0x10fb3: 68,
+ 0x10fb4: 82,
+ 0x10fb5: 82,
+ 0x10fb6: 82,
+ 0x10fb7: 85,
+ 0x10fb8: 68,
+ 0x10fb9: 82,
+ 0x10fba: 82,
+ 0x10fbb: 68,
+ 0x10fbc: 68,
+ 0x10fbd: 82,
+ 0x10fbe: 68,
+ 0x10fbf: 68,
+ 0x10fc0: 85,
+ 0x10fc1: 68,
+ 0x10fc2: 82,
+ 0x10fc3: 82,
+ 0x10fc4: 68,
+ 0x10fc5: 85,
+ 0x10fc6: 85,
+ 0x10fc7: 85,
+ 0x10fc8: 85,
+ 0x10fc9: 82,
+ 0x10fca: 68,
+ 0x10fcb: 76,
+ 0x110bd: 85,
+ 0x110cd: 85,
+ 0x1e900: 68,
+ 0x1e901: 68,
+ 0x1e902: 68,
+ 0x1e903: 68,
+ 0x1e904: 68,
+ 0x1e905: 68,
+ 0x1e906: 68,
+ 0x1e907: 68,
+ 0x1e908: 68,
+ 0x1e909: 68,
+ 0x1e90a: 68,
+ 0x1e90b: 68,
+ 0x1e90c: 68,
+ 0x1e90d: 68,
+ 0x1e90e: 68,
+ 0x1e90f: 68,
+ 0x1e910: 68,
+ 0x1e911: 68,
+ 0x1e912: 68,
+ 0x1e913: 68,
+ 0x1e914: 68,
+ 0x1e915: 68,
+ 0x1e916: 68,
+ 0x1e917: 68,
+ 0x1e918: 68,
+ 0x1e919: 68,
+ 0x1e91a: 68,
+ 0x1e91b: 68,
+ 0x1e91c: 68,
+ 0x1e91d: 68,
+ 0x1e91e: 68,
+ 0x1e91f: 68,
+ 0x1e920: 68,
+ 0x1e921: 68,
+ 0x1e922: 68,
+ 0x1e923: 68,
+ 0x1e924: 68,
+ 0x1e925: 68,
+ 0x1e926: 68,
+ 0x1e927: 68,
+ 0x1e928: 68,
+ 0x1e929: 68,
+ 0x1e92a: 68,
+ 0x1e92b: 68,
+ 0x1e92c: 68,
+ 0x1e92d: 68,
+ 0x1e92e: 68,
+ 0x1e92f: 68,
+ 0x1e930: 68,
+ 0x1e931: 68,
+ 0x1e932: 68,
+ 0x1e933: 68,
+ 0x1e934: 68,
+ 0x1e935: 68,
+ 0x1e936: 68,
+ 0x1e937: 68,
+ 0x1e938: 68,
+ 0x1e939: 68,
+ 0x1e93a: 68,
+ 0x1e93b: 68,
+ 0x1e93c: 68,
+ 0x1e93d: 68,
+ 0x1e93e: 68,
+ 0x1e93f: 68,
+ 0x1e940: 68,
+ 0x1e941: 68,
+ 0x1e942: 68,
+ 0x1e943: 68,
+ 0x1e94b: 84,
+}
+codepoint_classes = {
+ 'PVALID': (
+ 0x2d0000002e,
+ 0x300000003a,
+ 0x610000007b,
+ 0xdf000000f7,
+ 0xf800000100,
+ 0x10100000102,
+ 0x10300000104,
+ 0x10500000106,
+ 0x10700000108,
+ 0x1090000010a,
+ 0x10b0000010c,
+ 0x10d0000010e,
+ 0x10f00000110,
+ 0x11100000112,
+ 0x11300000114,
+ 0x11500000116,
+ 0x11700000118,
+ 0x1190000011a,
+ 0x11b0000011c,
+ 0x11d0000011e,
+ 0x11f00000120,
+ 0x12100000122,
+ 0x12300000124,
+ 0x12500000126,
+ 0x12700000128,
+ 0x1290000012a,
+ 0x12b0000012c,
+ 0x12d0000012e,
+ 0x12f00000130,
+ 0x13100000132,
+ 0x13500000136,
+ 0x13700000139,
+ 0x13a0000013b,
+ 0x13c0000013d,
+ 0x13e0000013f,
+ 0x14200000143,
+ 0x14400000145,
+ 0x14600000147,
+ 0x14800000149,
+ 0x14b0000014c,
+ 0x14d0000014e,
+ 0x14f00000150,
+ 0x15100000152,
+ 0x15300000154,
+ 0x15500000156,
+ 0x15700000158,
+ 0x1590000015a,
+ 0x15b0000015c,
+ 0x15d0000015e,
+ 0x15f00000160,
+ 0x16100000162,
+ 0x16300000164,
+ 0x16500000166,
+ 0x16700000168,
+ 0x1690000016a,
+ 0x16b0000016c,
+ 0x16d0000016e,
+ 0x16f00000170,
+ 0x17100000172,
+ 0x17300000174,
+ 0x17500000176,
+ 0x17700000178,
+ 0x17a0000017b,
+ 0x17c0000017d,
+ 0x17e0000017f,
+ 0x18000000181,
+ 0x18300000184,
+ 0x18500000186,
+ 0x18800000189,
+ 0x18c0000018e,
+ 0x19200000193,
+ 0x19500000196,
+ 0x1990000019c,
+ 0x19e0000019f,
+ 0x1a1000001a2,
+ 0x1a3000001a4,
+ 0x1a5000001a6,
+ 0x1a8000001a9,
+ 0x1aa000001ac,
+ 0x1ad000001ae,
+ 0x1b0000001b1,
+ 0x1b4000001b5,
+ 0x1b6000001b7,
+ 0x1b9000001bc,
+ 0x1bd000001c4,
+ 0x1ce000001cf,
+ 0x1d0000001d1,
+ 0x1d2000001d3,
+ 0x1d4000001d5,
+ 0x1d6000001d7,
+ 0x1d8000001d9,
+ 0x1da000001db,
+ 0x1dc000001de,
+ 0x1df000001e0,
+ 0x1e1000001e2,
+ 0x1e3000001e4,
+ 0x1e5000001e6,
+ 0x1e7000001e8,
+ 0x1e9000001ea,
+ 0x1eb000001ec,
+ 0x1ed000001ee,
+ 0x1ef000001f1,
+ 0x1f5000001f6,
+ 0x1f9000001fa,
+ 0x1fb000001fc,
+ 0x1fd000001fe,
+ 0x1ff00000200,
+ 0x20100000202,
+ 0x20300000204,
+ 0x20500000206,
+ 0x20700000208,
+ 0x2090000020a,
+ 0x20b0000020c,
+ 0x20d0000020e,
+ 0x20f00000210,
+ 0x21100000212,
+ 0x21300000214,
+ 0x21500000216,
+ 0x21700000218,
+ 0x2190000021a,
+ 0x21b0000021c,
+ 0x21d0000021e,
+ 0x21f00000220,
+ 0x22100000222,
+ 0x22300000224,
+ 0x22500000226,
+ 0x22700000228,
+ 0x2290000022a,
+ 0x22b0000022c,
+ 0x22d0000022e,
+ 0x22f00000230,
+ 0x23100000232,
+ 0x2330000023a,
+ 0x23c0000023d,
+ 0x23f00000241,
+ 0x24200000243,
+ 0x24700000248,
+ 0x2490000024a,
+ 0x24b0000024c,
+ 0x24d0000024e,
+ 0x24f000002b0,
+ 0x2b9000002c2,
+ 0x2c6000002d2,
+ 0x2ec000002ed,
+ 0x2ee000002ef,
+ 0x30000000340,
+ 0x34200000343,
+ 0x3460000034f,
+ 0x35000000370,
+ 0x37100000372,
+ 0x37300000374,
+ 0x37700000378,
+ 0x37b0000037e,
+ 0x39000000391,
+ 0x3ac000003cf,
+ 0x3d7000003d8,
+ 0x3d9000003da,
+ 0x3db000003dc,
+ 0x3dd000003de,
+ 0x3df000003e0,
+ 0x3e1000003e2,
+ 0x3e3000003e4,
+ 0x3e5000003e6,
+ 0x3e7000003e8,
+ 0x3e9000003ea,
+ 0x3eb000003ec,
+ 0x3ed000003ee,
+ 0x3ef000003f0,
+ 0x3f3000003f4,
+ 0x3f8000003f9,
+ 0x3fb000003fd,
+ 0x43000000460,
+ 0x46100000462,
+ 0x46300000464,
+ 0x46500000466,
+ 0x46700000468,
+ 0x4690000046a,
+ 0x46b0000046c,
+ 0x46d0000046e,
+ 0x46f00000470,
+ 0x47100000472,
+ 0x47300000474,
+ 0x47500000476,
+ 0x47700000478,
+ 0x4790000047a,
+ 0x47b0000047c,
+ 0x47d0000047e,
+ 0x47f00000480,
+ 0x48100000482,
+ 0x48300000488,
+ 0x48b0000048c,
+ 0x48d0000048e,
+ 0x48f00000490,
+ 0x49100000492,
+ 0x49300000494,
+ 0x49500000496,
+ 0x49700000498,
+ 0x4990000049a,
+ 0x49b0000049c,
+ 0x49d0000049e,
+ 0x49f000004a0,
+ 0x4a1000004a2,
+ 0x4a3000004a4,
+ 0x4a5000004a6,
+ 0x4a7000004a8,
+ 0x4a9000004aa,
+ 0x4ab000004ac,
+ 0x4ad000004ae,
+ 0x4af000004b0,
+ 0x4b1000004b2,
+ 0x4b3000004b4,
+ 0x4b5000004b6,
+ 0x4b7000004b8,
+ 0x4b9000004ba,
+ 0x4bb000004bc,
+ 0x4bd000004be,
+ 0x4bf000004c0,
+ 0x4c2000004c3,
+ 0x4c4000004c5,
+ 0x4c6000004c7,
+ 0x4c8000004c9,
+ 0x4ca000004cb,
+ 0x4cc000004cd,
+ 0x4ce000004d0,
+ 0x4d1000004d2,
+ 0x4d3000004d4,
+ 0x4d5000004d6,
+ 0x4d7000004d8,
+ 0x4d9000004da,
+ 0x4db000004dc,
+ 0x4dd000004de,
+ 0x4df000004e0,
+ 0x4e1000004e2,
+ 0x4e3000004e4,
+ 0x4e5000004e6,
+ 0x4e7000004e8,
+ 0x4e9000004ea,
+ 0x4eb000004ec,
+ 0x4ed000004ee,
+ 0x4ef000004f0,
+ 0x4f1000004f2,
+ 0x4f3000004f4,
+ 0x4f5000004f6,
+ 0x4f7000004f8,
+ 0x4f9000004fa,
+ 0x4fb000004fc,
+ 0x4fd000004fe,
+ 0x4ff00000500,
+ 0x50100000502,
+ 0x50300000504,
+ 0x50500000506,
+ 0x50700000508,
+ 0x5090000050a,
+ 0x50b0000050c,
+ 0x50d0000050e,
+ 0x50f00000510,
+ 0x51100000512,
+ 0x51300000514,
+ 0x51500000516,
+ 0x51700000518,
+ 0x5190000051a,
+ 0x51b0000051c,
+ 0x51d0000051e,
+ 0x51f00000520,
+ 0x52100000522,
+ 0x52300000524,
+ 0x52500000526,
+ 0x52700000528,
+ 0x5290000052a,
+ 0x52b0000052c,
+ 0x52d0000052e,
+ 0x52f00000530,
+ 0x5590000055a,
+ 0x56000000587,
+ 0x58800000589,
+ 0x591000005be,
+ 0x5bf000005c0,
+ 0x5c1000005c3,
+ 0x5c4000005c6,
+ 0x5c7000005c8,
+ 0x5d0000005eb,
+ 0x5ef000005f3,
+ 0x6100000061b,
+ 0x62000000640,
+ 0x64100000660,
+ 0x66e00000675,
+ 0x679000006d4,
+ 0x6d5000006dd,
+ 0x6df000006e9,
+ 0x6ea000006f0,
+ 0x6fa00000700,
+ 0x7100000074b,
+ 0x74d000007b2,
+ 0x7c0000007f6,
+ 0x7fd000007fe,
+ 0x8000000082e,
+ 0x8400000085c,
+ 0x8600000086b,
+ 0x87000000888,
+ 0x8890000088f,
+ 0x898000008e2,
+ 0x8e300000958,
+ 0x96000000964,
+ 0x96600000970,
+ 0x97100000984,
+ 0x9850000098d,
+ 0x98f00000991,
+ 0x993000009a9,
+ 0x9aa000009b1,
+ 0x9b2000009b3,
+ 0x9b6000009ba,
+ 0x9bc000009c5,
+ 0x9c7000009c9,
+ 0x9cb000009cf,
+ 0x9d7000009d8,
+ 0x9e0000009e4,
+ 0x9e6000009f2,
+ 0x9fc000009fd,
+ 0x9fe000009ff,
+ 0xa0100000a04,
+ 0xa0500000a0b,
+ 0xa0f00000a11,
+ 0xa1300000a29,
+ 0xa2a00000a31,
+ 0xa3200000a33,
+ 0xa3500000a36,
+ 0xa3800000a3a,
+ 0xa3c00000a3d,
+ 0xa3e00000a43,
+ 0xa4700000a49,
+ 0xa4b00000a4e,
+ 0xa5100000a52,
+ 0xa5c00000a5d,
+ 0xa6600000a76,
+ 0xa8100000a84,
+ 0xa8500000a8e,
+ 0xa8f00000a92,
+ 0xa9300000aa9,
+ 0xaaa00000ab1,
+ 0xab200000ab4,
+ 0xab500000aba,
+ 0xabc00000ac6,
+ 0xac700000aca,
+ 0xacb00000ace,
+ 0xad000000ad1,
+ 0xae000000ae4,
+ 0xae600000af0,
+ 0xaf900000b00,
+ 0xb0100000b04,
+ 0xb0500000b0d,
+ 0xb0f00000b11,
+ 0xb1300000b29,
+ 0xb2a00000b31,
+ 0xb3200000b34,
+ 0xb3500000b3a,
+ 0xb3c00000b45,
+ 0xb4700000b49,
+ 0xb4b00000b4e,
+ 0xb5500000b58,
+ 0xb5f00000b64,
+ 0xb6600000b70,
+ 0xb7100000b72,
+ 0xb8200000b84,
+ 0xb8500000b8b,
+ 0xb8e00000b91,
+ 0xb9200000b96,
+ 0xb9900000b9b,
+ 0xb9c00000b9d,
+ 0xb9e00000ba0,
+ 0xba300000ba5,
+ 0xba800000bab,
+ 0xbae00000bba,
+ 0xbbe00000bc3,
+ 0xbc600000bc9,
+ 0xbca00000bce,
+ 0xbd000000bd1,
+ 0xbd700000bd8,
+ 0xbe600000bf0,
+ 0xc0000000c0d,
+ 0xc0e00000c11,
+ 0xc1200000c29,
+ 0xc2a00000c3a,
+ 0xc3c00000c45,
+ 0xc4600000c49,
+ 0xc4a00000c4e,
+ 0xc5500000c57,
+ 0xc5800000c5b,
+ 0xc5d00000c5e,
+ 0xc6000000c64,
+ 0xc6600000c70,
+ 0xc8000000c84,
+ 0xc8500000c8d,
+ 0xc8e00000c91,
+ 0xc9200000ca9,
+ 0xcaa00000cb4,
+ 0xcb500000cba,
+ 0xcbc00000cc5,
+ 0xcc600000cc9,
+ 0xcca00000cce,
+ 0xcd500000cd7,
+ 0xcdd00000cdf,
+ 0xce000000ce4,
+ 0xce600000cf0,
+ 0xcf100000cf4,
+ 0xd0000000d0d,
+ 0xd0e00000d11,
+ 0xd1200000d45,
+ 0xd4600000d49,
+ 0xd4a00000d4f,
+ 0xd5400000d58,
+ 0xd5f00000d64,
+ 0xd6600000d70,
+ 0xd7a00000d80,
+ 0xd8100000d84,
+ 0xd8500000d97,
+ 0xd9a00000db2,
+ 0xdb300000dbc,
+ 0xdbd00000dbe,
+ 0xdc000000dc7,
+ 0xdca00000dcb,
+ 0xdcf00000dd5,
+ 0xdd600000dd7,
+ 0xdd800000de0,
+ 0xde600000df0,
+ 0xdf200000df4,
+ 0xe0100000e33,
+ 0xe3400000e3b,
+ 0xe4000000e4f,
+ 0xe5000000e5a,
+ 0xe8100000e83,
+ 0xe8400000e85,
+ 0xe8600000e8b,
+ 0xe8c00000ea4,
+ 0xea500000ea6,
+ 0xea700000eb3,
+ 0xeb400000ebe,
+ 0xec000000ec5,
+ 0xec600000ec7,
+ 0xec800000ecf,
+ 0xed000000eda,
+ 0xede00000ee0,
+ 0xf0000000f01,
+ 0xf0b00000f0c,
+ 0xf1800000f1a,
+ 0xf2000000f2a,
+ 0xf3500000f36,
+ 0xf3700000f38,
+ 0xf3900000f3a,
+ 0xf3e00000f43,
+ 0xf4400000f48,
+ 0xf4900000f4d,
+ 0xf4e00000f52,
+ 0xf5300000f57,
+ 0xf5800000f5c,
+ 0xf5d00000f69,
+ 0xf6a00000f6d,
+ 0xf7100000f73,
+ 0xf7400000f75,
+ 0xf7a00000f81,
+ 0xf8200000f85,
+ 0xf8600000f93,
+ 0xf9400000f98,
+ 0xf9900000f9d,
+ 0xf9e00000fa2,
+ 0xfa300000fa7,
+ 0xfa800000fac,
+ 0xfad00000fb9,
+ 0xfba00000fbd,
+ 0xfc600000fc7,
+ 0x10000000104a,
+ 0x10500000109e,
+ 0x10d0000010fb,
+ 0x10fd00001100,
+ 0x120000001249,
+ 0x124a0000124e,
+ 0x125000001257,
+ 0x125800001259,
+ 0x125a0000125e,
+ 0x126000001289,
+ 0x128a0000128e,
+ 0x1290000012b1,
+ 0x12b2000012b6,
+ 0x12b8000012bf,
+ 0x12c0000012c1,
+ 0x12c2000012c6,
+ 0x12c8000012d7,
+ 0x12d800001311,
+ 0x131200001316,
+ 0x13180000135b,
+ 0x135d00001360,
+ 0x138000001390,
+ 0x13a0000013f6,
+ 0x14010000166d,
+ 0x166f00001680,
+ 0x16810000169b,
+ 0x16a0000016eb,
+ 0x16f1000016f9,
+ 0x170000001716,
+ 0x171f00001735,
+ 0x174000001754,
+ 0x17600000176d,
+ 0x176e00001771,
+ 0x177200001774,
+ 0x1780000017b4,
+ 0x17b6000017d4,
+ 0x17d7000017d8,
+ 0x17dc000017de,
+ 0x17e0000017ea,
+ 0x18100000181a,
+ 0x182000001879,
+ 0x1880000018ab,
+ 0x18b0000018f6,
+ 0x19000000191f,
+ 0x19200000192c,
+ 0x19300000193c,
+ 0x19460000196e,
+ 0x197000001975,
+ 0x1980000019ac,
+ 0x19b0000019ca,
+ 0x19d0000019da,
+ 0x1a0000001a1c,
+ 0x1a2000001a5f,
+ 0x1a6000001a7d,
+ 0x1a7f00001a8a,
+ 0x1a9000001a9a,
+ 0x1aa700001aa8,
+ 0x1ab000001abe,
+ 0x1abf00001acf,
+ 0x1b0000001b4d,
+ 0x1b5000001b5a,
+ 0x1b6b00001b74,
+ 0x1b8000001bf4,
+ 0x1c0000001c38,
+ 0x1c4000001c4a,
+ 0x1c4d00001c7e,
+ 0x1cd000001cd3,
+ 0x1cd400001cfb,
+ 0x1d0000001d2c,
+ 0x1d2f00001d30,
+ 0x1d3b00001d3c,
+ 0x1d4e00001d4f,
+ 0x1d6b00001d78,
+ 0x1d7900001d9b,
+ 0x1dc000001e00,
+ 0x1e0100001e02,
+ 0x1e0300001e04,
+ 0x1e0500001e06,
+ 0x1e0700001e08,
+ 0x1e0900001e0a,
+ 0x1e0b00001e0c,
+ 0x1e0d00001e0e,
+ 0x1e0f00001e10,
+ 0x1e1100001e12,
+ 0x1e1300001e14,
+ 0x1e1500001e16,
+ 0x1e1700001e18,
+ 0x1e1900001e1a,
+ 0x1e1b00001e1c,
+ 0x1e1d00001e1e,
+ 0x1e1f00001e20,
+ 0x1e2100001e22,
+ 0x1e2300001e24,
+ 0x1e2500001e26,
+ 0x1e2700001e28,
+ 0x1e2900001e2a,
+ 0x1e2b00001e2c,
+ 0x1e2d00001e2e,
+ 0x1e2f00001e30,
+ 0x1e3100001e32,
+ 0x1e3300001e34,
+ 0x1e3500001e36,
+ 0x1e3700001e38,
+ 0x1e3900001e3a,
+ 0x1e3b00001e3c,
+ 0x1e3d00001e3e,
+ 0x1e3f00001e40,
+ 0x1e4100001e42,
+ 0x1e4300001e44,
+ 0x1e4500001e46,
+ 0x1e4700001e48,
+ 0x1e4900001e4a,
+ 0x1e4b00001e4c,
+ 0x1e4d00001e4e,
+ 0x1e4f00001e50,
+ 0x1e5100001e52,
+ 0x1e5300001e54,
+ 0x1e5500001e56,
+ 0x1e5700001e58,
+ 0x1e5900001e5a,
+ 0x1e5b00001e5c,
+ 0x1e5d00001e5e,
+ 0x1e5f00001e60,
+ 0x1e6100001e62,
+ 0x1e6300001e64,
+ 0x1e6500001e66,
+ 0x1e6700001e68,
+ 0x1e6900001e6a,
+ 0x1e6b00001e6c,
+ 0x1e6d00001e6e,
+ 0x1e6f00001e70,
+ 0x1e7100001e72,
+ 0x1e7300001e74,
+ 0x1e7500001e76,
+ 0x1e7700001e78,
+ 0x1e7900001e7a,
+ 0x1e7b00001e7c,
+ 0x1e7d00001e7e,
+ 0x1e7f00001e80,
+ 0x1e8100001e82,
+ 0x1e8300001e84,
+ 0x1e8500001e86,
+ 0x1e8700001e88,
+ 0x1e8900001e8a,
+ 0x1e8b00001e8c,
+ 0x1e8d00001e8e,
+ 0x1e8f00001e90,
+ 0x1e9100001e92,
+ 0x1e9300001e94,
+ 0x1e9500001e9a,
+ 0x1e9c00001e9e,
+ 0x1e9f00001ea0,
+ 0x1ea100001ea2,
+ 0x1ea300001ea4,
+ 0x1ea500001ea6,
+ 0x1ea700001ea8,
+ 0x1ea900001eaa,
+ 0x1eab00001eac,
+ 0x1ead00001eae,
+ 0x1eaf00001eb0,
+ 0x1eb100001eb2,
+ 0x1eb300001eb4,
+ 0x1eb500001eb6,
+ 0x1eb700001eb8,
+ 0x1eb900001eba,
+ 0x1ebb00001ebc,
+ 0x1ebd00001ebe,
+ 0x1ebf00001ec0,
+ 0x1ec100001ec2,
+ 0x1ec300001ec4,
+ 0x1ec500001ec6,
+ 0x1ec700001ec8,
+ 0x1ec900001eca,
+ 0x1ecb00001ecc,
+ 0x1ecd00001ece,
+ 0x1ecf00001ed0,
+ 0x1ed100001ed2,
+ 0x1ed300001ed4,
+ 0x1ed500001ed6,
+ 0x1ed700001ed8,
+ 0x1ed900001eda,
+ 0x1edb00001edc,
+ 0x1edd00001ede,
+ 0x1edf00001ee0,
+ 0x1ee100001ee2,
+ 0x1ee300001ee4,
+ 0x1ee500001ee6,
+ 0x1ee700001ee8,
+ 0x1ee900001eea,
+ 0x1eeb00001eec,
+ 0x1eed00001eee,
+ 0x1eef00001ef0,
+ 0x1ef100001ef2,
+ 0x1ef300001ef4,
+ 0x1ef500001ef6,
+ 0x1ef700001ef8,
+ 0x1ef900001efa,
+ 0x1efb00001efc,
+ 0x1efd00001efe,
+ 0x1eff00001f08,
+ 0x1f1000001f16,
+ 0x1f2000001f28,
+ 0x1f3000001f38,
+ 0x1f4000001f46,
+ 0x1f5000001f58,
+ 0x1f6000001f68,
+ 0x1f7000001f71,
+ 0x1f7200001f73,
+ 0x1f7400001f75,
+ 0x1f7600001f77,
+ 0x1f7800001f79,
+ 0x1f7a00001f7b,
+ 0x1f7c00001f7d,
+ 0x1fb000001fb2,
+ 0x1fb600001fb7,
+ 0x1fc600001fc7,
+ 0x1fd000001fd3,
+ 0x1fd600001fd8,
+ 0x1fe000001fe3,
+ 0x1fe400001fe8,
+ 0x1ff600001ff7,
+ 0x214e0000214f,
+ 0x218400002185,
+ 0x2c3000002c60,
+ 0x2c6100002c62,
+ 0x2c6500002c67,
+ 0x2c6800002c69,
+ 0x2c6a00002c6b,
+ 0x2c6c00002c6d,
+ 0x2c7100002c72,
+ 0x2c7300002c75,
+ 0x2c7600002c7c,
+ 0x2c8100002c82,
+ 0x2c8300002c84,
+ 0x2c8500002c86,
+ 0x2c8700002c88,
+ 0x2c8900002c8a,
+ 0x2c8b00002c8c,
+ 0x2c8d00002c8e,
+ 0x2c8f00002c90,
+ 0x2c9100002c92,
+ 0x2c9300002c94,
+ 0x2c9500002c96,
+ 0x2c9700002c98,
+ 0x2c9900002c9a,
+ 0x2c9b00002c9c,
+ 0x2c9d00002c9e,
+ 0x2c9f00002ca0,
+ 0x2ca100002ca2,
+ 0x2ca300002ca4,
+ 0x2ca500002ca6,
+ 0x2ca700002ca8,
+ 0x2ca900002caa,
+ 0x2cab00002cac,
+ 0x2cad00002cae,
+ 0x2caf00002cb0,
+ 0x2cb100002cb2,
+ 0x2cb300002cb4,
+ 0x2cb500002cb6,
+ 0x2cb700002cb8,
+ 0x2cb900002cba,
+ 0x2cbb00002cbc,
+ 0x2cbd00002cbe,
+ 0x2cbf00002cc0,
+ 0x2cc100002cc2,
+ 0x2cc300002cc4,
+ 0x2cc500002cc6,
+ 0x2cc700002cc8,
+ 0x2cc900002cca,
+ 0x2ccb00002ccc,
+ 0x2ccd00002cce,
+ 0x2ccf00002cd0,
+ 0x2cd100002cd2,
+ 0x2cd300002cd4,
+ 0x2cd500002cd6,
+ 0x2cd700002cd8,
+ 0x2cd900002cda,
+ 0x2cdb00002cdc,
+ 0x2cdd00002cde,
+ 0x2cdf00002ce0,
+ 0x2ce100002ce2,
+ 0x2ce300002ce5,
+ 0x2cec00002ced,
+ 0x2cee00002cf2,
+ 0x2cf300002cf4,
+ 0x2d0000002d26,
+ 0x2d2700002d28,
+ 0x2d2d00002d2e,
+ 0x2d3000002d68,
+ 0x2d7f00002d97,
+ 0x2da000002da7,
+ 0x2da800002daf,
+ 0x2db000002db7,
+ 0x2db800002dbf,
+ 0x2dc000002dc7,
+ 0x2dc800002dcf,
+ 0x2dd000002dd7,
+ 0x2dd800002ddf,
+ 0x2de000002e00,
+ 0x2e2f00002e30,
+ 0x300500003008,
+ 0x302a0000302e,
+ 0x303c0000303d,
+ 0x304100003097,
+ 0x30990000309b,
+ 0x309d0000309f,
+ 0x30a1000030fb,
+ 0x30fc000030ff,
+ 0x310500003130,
+ 0x31a0000031c0,
+ 0x31f000003200,
+ 0x340000004dc0,
+ 0x4e000000a48d,
+ 0xa4d00000a4fe,
+ 0xa5000000a60d,
+ 0xa6100000a62c,
+ 0xa6410000a642,
+ 0xa6430000a644,
+ 0xa6450000a646,
+ 0xa6470000a648,
+ 0xa6490000a64a,
+ 0xa64b0000a64c,
+ 0xa64d0000a64e,
+ 0xa64f0000a650,
+ 0xa6510000a652,
+ 0xa6530000a654,
+ 0xa6550000a656,
+ 0xa6570000a658,
+ 0xa6590000a65a,
+ 0xa65b0000a65c,
+ 0xa65d0000a65e,
+ 0xa65f0000a660,
+ 0xa6610000a662,
+ 0xa6630000a664,
+ 0xa6650000a666,
+ 0xa6670000a668,
+ 0xa6690000a66a,
+ 0xa66b0000a66c,
+ 0xa66d0000a670,
+ 0xa6740000a67e,
+ 0xa67f0000a680,
+ 0xa6810000a682,
+ 0xa6830000a684,
+ 0xa6850000a686,
+ 0xa6870000a688,
+ 0xa6890000a68a,
+ 0xa68b0000a68c,
+ 0xa68d0000a68e,
+ 0xa68f0000a690,
+ 0xa6910000a692,
+ 0xa6930000a694,
+ 0xa6950000a696,
+ 0xa6970000a698,
+ 0xa6990000a69a,
+ 0xa69b0000a69c,
+ 0xa69e0000a6e6,
+ 0xa6f00000a6f2,
+ 0xa7170000a720,
+ 0xa7230000a724,
+ 0xa7250000a726,
+ 0xa7270000a728,
+ 0xa7290000a72a,
+ 0xa72b0000a72c,
+ 0xa72d0000a72e,
+ 0xa72f0000a732,
+ 0xa7330000a734,
+ 0xa7350000a736,
+ 0xa7370000a738,
+ 0xa7390000a73a,
+ 0xa73b0000a73c,
+ 0xa73d0000a73e,
+ 0xa73f0000a740,
+ 0xa7410000a742,
+ 0xa7430000a744,
+ 0xa7450000a746,
+ 0xa7470000a748,
+ 0xa7490000a74a,
+ 0xa74b0000a74c,
+ 0xa74d0000a74e,
+ 0xa74f0000a750,
+ 0xa7510000a752,
+ 0xa7530000a754,
+ 0xa7550000a756,
+ 0xa7570000a758,
+ 0xa7590000a75a,
+ 0xa75b0000a75c,
+ 0xa75d0000a75e,
+ 0xa75f0000a760,
+ 0xa7610000a762,
+ 0xa7630000a764,
+ 0xa7650000a766,
+ 0xa7670000a768,
+ 0xa7690000a76a,
+ 0xa76b0000a76c,
+ 0xa76d0000a76e,
+ 0xa76f0000a770,
+ 0xa7710000a779,
+ 0xa77a0000a77b,
+ 0xa77c0000a77d,
+ 0xa77f0000a780,
+ 0xa7810000a782,
+ 0xa7830000a784,
+ 0xa7850000a786,
+ 0xa7870000a789,
+ 0xa78c0000a78d,
+ 0xa78e0000a790,
+ 0xa7910000a792,
+ 0xa7930000a796,
+ 0xa7970000a798,
+ 0xa7990000a79a,
+ 0xa79b0000a79c,
+ 0xa79d0000a79e,
+ 0xa79f0000a7a0,
+ 0xa7a10000a7a2,
+ 0xa7a30000a7a4,
+ 0xa7a50000a7a6,
+ 0xa7a70000a7a8,
+ 0xa7a90000a7aa,
+ 0xa7af0000a7b0,
+ 0xa7b50000a7b6,
+ 0xa7b70000a7b8,
+ 0xa7b90000a7ba,
+ 0xa7bb0000a7bc,
+ 0xa7bd0000a7be,
+ 0xa7bf0000a7c0,
+ 0xa7c10000a7c2,
+ 0xa7c30000a7c4,
+ 0xa7c80000a7c9,
+ 0xa7ca0000a7cb,
+ 0xa7d10000a7d2,
+ 0xa7d30000a7d4,
+ 0xa7d50000a7d6,
+ 0xa7d70000a7d8,
+ 0xa7d90000a7da,
+ 0xa7f60000a7f8,
+ 0xa7fa0000a828,
+ 0xa82c0000a82d,
+ 0xa8400000a874,
+ 0xa8800000a8c6,
+ 0xa8d00000a8da,
+ 0xa8e00000a8f8,
+ 0xa8fb0000a8fc,
+ 0xa8fd0000a92e,
+ 0xa9300000a954,
+ 0xa9800000a9c1,
+ 0xa9cf0000a9da,
+ 0xa9e00000a9ff,
+ 0xaa000000aa37,
+ 0xaa400000aa4e,
+ 0xaa500000aa5a,
+ 0xaa600000aa77,
+ 0xaa7a0000aac3,
+ 0xaadb0000aade,
+ 0xaae00000aaf0,
+ 0xaaf20000aaf7,
+ 0xab010000ab07,
+ 0xab090000ab0f,
+ 0xab110000ab17,
+ 0xab200000ab27,
+ 0xab280000ab2f,
+ 0xab300000ab5b,
+ 0xab600000ab69,
+ 0xabc00000abeb,
+ 0xabec0000abee,
+ 0xabf00000abfa,
+ 0xac000000d7a4,
+ 0xfa0e0000fa10,
+ 0xfa110000fa12,
+ 0xfa130000fa15,
+ 0xfa1f0000fa20,
+ 0xfa210000fa22,
+ 0xfa230000fa25,
+ 0xfa270000fa2a,
+ 0xfb1e0000fb1f,
+ 0xfe200000fe30,
+ 0xfe730000fe74,
+ 0x100000001000c,
+ 0x1000d00010027,
+ 0x100280001003b,
+ 0x1003c0001003e,
+ 0x1003f0001004e,
+ 0x100500001005e,
+ 0x10080000100fb,
+ 0x101fd000101fe,
+ 0x102800001029d,
+ 0x102a0000102d1,
+ 0x102e0000102e1,
+ 0x1030000010320,
+ 0x1032d00010341,
+ 0x103420001034a,
+ 0x103500001037b,
+ 0x103800001039e,
+ 0x103a0000103c4,
+ 0x103c8000103d0,
+ 0x104280001049e,
+ 0x104a0000104aa,
+ 0x104d8000104fc,
+ 0x1050000010528,
+ 0x1053000010564,
+ 0x10597000105a2,
+ 0x105a3000105b2,
+ 0x105b3000105ba,
+ 0x105bb000105bd,
+ 0x1060000010737,
+ 0x1074000010756,
+ 0x1076000010768,
+ 0x1078000010781,
+ 0x1080000010806,
+ 0x1080800010809,
+ 0x1080a00010836,
+ 0x1083700010839,
+ 0x1083c0001083d,
+ 0x1083f00010856,
+ 0x1086000010877,
+ 0x108800001089f,
+ 0x108e0000108f3,
+ 0x108f4000108f6,
+ 0x1090000010916,
+ 0x109200001093a,
+ 0x10980000109b8,
+ 0x109be000109c0,
+ 0x10a0000010a04,
+ 0x10a0500010a07,
+ 0x10a0c00010a14,
+ 0x10a1500010a18,
+ 0x10a1900010a36,
+ 0x10a3800010a3b,
+ 0x10a3f00010a40,
+ 0x10a6000010a7d,
+ 0x10a8000010a9d,
+ 0x10ac000010ac8,
+ 0x10ac900010ae7,
+ 0x10b0000010b36,
+ 0x10b4000010b56,
+ 0x10b6000010b73,
+ 0x10b8000010b92,
+ 0x10c0000010c49,
+ 0x10cc000010cf3,
+ 0x10d0000010d28,
+ 0x10d3000010d3a,
+ 0x10e8000010eaa,
+ 0x10eab00010ead,
+ 0x10eb000010eb2,
+ 0x10efd00010f1d,
+ 0x10f2700010f28,
+ 0x10f3000010f51,
+ 0x10f7000010f86,
+ 0x10fb000010fc5,
+ 0x10fe000010ff7,
+ 0x1100000011047,
+ 0x1106600011076,
+ 0x1107f000110bb,
+ 0x110c2000110c3,
+ 0x110d0000110e9,
+ 0x110f0000110fa,
+ 0x1110000011135,
+ 0x1113600011140,
+ 0x1114400011148,
+ 0x1115000011174,
+ 0x1117600011177,
+ 0x11180000111c5,
+ 0x111c9000111cd,
+ 0x111ce000111db,
+ 0x111dc000111dd,
+ 0x1120000011212,
+ 0x1121300011238,
+ 0x1123e00011242,
+ 0x1128000011287,
+ 0x1128800011289,
+ 0x1128a0001128e,
+ 0x1128f0001129e,
+ 0x1129f000112a9,
+ 0x112b0000112eb,
+ 0x112f0000112fa,
+ 0x1130000011304,
+ 0x113050001130d,
+ 0x1130f00011311,
+ 0x1131300011329,
+ 0x1132a00011331,
+ 0x1133200011334,
+ 0x113350001133a,
+ 0x1133b00011345,
+ 0x1134700011349,
+ 0x1134b0001134e,
+ 0x1135000011351,
+ 0x1135700011358,
+ 0x1135d00011364,
+ 0x113660001136d,
+ 0x1137000011375,
+ 0x114000001144b,
+ 0x114500001145a,
+ 0x1145e00011462,
+ 0x11480000114c6,
+ 0x114c7000114c8,
+ 0x114d0000114da,
+ 0x11580000115b6,
+ 0x115b8000115c1,
+ 0x115d8000115de,
+ 0x1160000011641,
+ 0x1164400011645,
+ 0x116500001165a,
+ 0x11680000116b9,
+ 0x116c0000116ca,
+ 0x117000001171b,
+ 0x1171d0001172c,
+ 0x117300001173a,
+ 0x1174000011747,
+ 0x118000001183b,
+ 0x118c0000118ea,
+ 0x118ff00011907,
+ 0x119090001190a,
+ 0x1190c00011914,
+ 0x1191500011917,
+ 0x1191800011936,
+ 0x1193700011939,
+ 0x1193b00011944,
+ 0x119500001195a,
+ 0x119a0000119a8,
+ 0x119aa000119d8,
+ 0x119da000119e2,
+ 0x119e3000119e5,
+ 0x11a0000011a3f,
+ 0x11a4700011a48,
+ 0x11a5000011a9a,
+ 0x11a9d00011a9e,
+ 0x11ab000011af9,
+ 0x11c0000011c09,
+ 0x11c0a00011c37,
+ 0x11c3800011c41,
+ 0x11c5000011c5a,
+ 0x11c7200011c90,
+ 0x11c9200011ca8,
+ 0x11ca900011cb7,
+ 0x11d0000011d07,
+ 0x11d0800011d0a,
+ 0x11d0b00011d37,
+ 0x11d3a00011d3b,
+ 0x11d3c00011d3e,
+ 0x11d3f00011d48,
+ 0x11d5000011d5a,
+ 0x11d6000011d66,
+ 0x11d6700011d69,
+ 0x11d6a00011d8f,
+ 0x11d9000011d92,
+ 0x11d9300011d99,
+ 0x11da000011daa,
+ 0x11ee000011ef7,
+ 0x11f0000011f11,
+ 0x11f1200011f3b,
+ 0x11f3e00011f43,
+ 0x11f5000011f5a,
+ 0x11fb000011fb1,
+ 0x120000001239a,
+ 0x1248000012544,
+ 0x12f9000012ff1,
+ 0x1300000013430,
+ 0x1344000013456,
+ 0x1440000014647,
+ 0x1680000016a39,
+ 0x16a4000016a5f,
+ 0x16a6000016a6a,
+ 0x16a7000016abf,
+ 0x16ac000016aca,
+ 0x16ad000016aee,
+ 0x16af000016af5,
+ 0x16b0000016b37,
+ 0x16b4000016b44,
+ 0x16b5000016b5a,
+ 0x16b6300016b78,
+ 0x16b7d00016b90,
+ 0x16e6000016e80,
+ 0x16f0000016f4b,
+ 0x16f4f00016f88,
+ 0x16f8f00016fa0,
+ 0x16fe000016fe2,
+ 0x16fe300016fe5,
+ 0x16ff000016ff2,
+ 0x17000000187f8,
+ 0x1880000018cd6,
+ 0x18d0000018d09,
+ 0x1aff00001aff4,
+ 0x1aff50001affc,
+ 0x1affd0001afff,
+ 0x1b0000001b123,
+ 0x1b1320001b133,
+ 0x1b1500001b153,
+ 0x1b1550001b156,
+ 0x1b1640001b168,
+ 0x1b1700001b2fc,
+ 0x1bc000001bc6b,
+ 0x1bc700001bc7d,
+ 0x1bc800001bc89,
+ 0x1bc900001bc9a,
+ 0x1bc9d0001bc9f,
+ 0x1cf000001cf2e,
+ 0x1cf300001cf47,
+ 0x1da000001da37,
+ 0x1da3b0001da6d,
+ 0x1da750001da76,
+ 0x1da840001da85,
+ 0x1da9b0001daa0,
+ 0x1daa10001dab0,
+ 0x1df000001df1f,
+ 0x1df250001df2b,
+ 0x1e0000001e007,
+ 0x1e0080001e019,
+ 0x1e01b0001e022,
+ 0x1e0230001e025,
+ 0x1e0260001e02b,
+ 0x1e0300001e06e,
+ 0x1e08f0001e090,
+ 0x1e1000001e12d,
+ 0x1e1300001e13e,
+ 0x1e1400001e14a,
+ 0x1e14e0001e14f,
+ 0x1e2900001e2af,
+ 0x1e2c00001e2fa,
+ 0x1e4d00001e4fa,
+ 0x1e7e00001e7e7,
+ 0x1e7e80001e7ec,
+ 0x1e7ed0001e7ef,
+ 0x1e7f00001e7ff,
+ 0x1e8000001e8c5,
+ 0x1e8d00001e8d7,
+ 0x1e9220001e94c,
+ 0x1e9500001e95a,
+ 0x200000002a6e0,
+ 0x2a7000002b73a,
+ 0x2b7400002b81e,
+ 0x2b8200002cea2,
+ 0x2ceb00002ebe1,
+ 0x2ebf00002ee5e,
+ 0x300000003134b,
+ 0x31350000323b0,
+ ),
+ 'CONTEXTJ': (
+ 0x200c0000200e,
+ ),
+ 'CONTEXTO': (
+ 0xb7000000b8,
+ 0x37500000376,
+ 0x5f3000005f5,
+ 0x6600000066a,
+ 0x6f0000006fa,
+ 0x30fb000030fc,
+ ),
+}
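Two notes on reading these generated tables (an illustrative decoding by hand, not part of the diff): each integer in `scripts` and `codepoint_classes` packs a half-open codepoint range as `(start << 32) | end`, the encoding defined by `intranges.py` in the next hunk, and the `joining_types` values are `ord()` codes of the Unicode Joining_Type letters that `valid_contextj` compares against.

```python
# The first 'Greek' entry, 0x37000000374, unpacks to the half-open range
# of codepoints U+0370 (inclusive) through U+0374 (exclusive).
packed = 0x37000000374
start, end = packed >> 32, packed & ((1 << 32) - 1)
assert (start, end) == (0x0370, 0x0374)

# joining_types values are ord() codes of the Joining_Type letter:
# 68 == ord('D') dual-joining, 82 == ord('R') right-joining,
# 76 == ord('L'), 84 == ord('T') transparent, 67 == ord('C'), 85 == ord('U').
assert chr(68) == 'D' and chr(82) == 'R' and chr(85) == 'U'
```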
diff --git a/Lib/site-packages/idna/intranges.py b/Lib/site-packages/idna/intranges.py
new file mode 100644
index 0000000..6a43b04
--- /dev/null
+++ b/Lib/site-packages/idna/intranges.py
@@ -0,0 +1,54 @@
+"""
+Given a list of integers, made up of (hopefully) a small number of long runs
+of consecutive integers, compute a representation of the form
+((start1, end1), (start2, end2) ...). Then answer the question "was x present
+in the original list?" in time O(log(# runs)).
+"""
+
+import bisect
+from typing import List, Tuple
+
+def intranges_from_list(list_: List[int]) -> Tuple[int, ...]:
+ """Represent a list of integers as a sequence of ranges:
+ ((start_0, end_0), (start_1, end_1), ...), such that the original
+ integers are exactly those x such that start_i <= x < end_i for some i.
+
+ Ranges are encoded as single integers (start << 32 | end), not as tuples.
+ """
+
+ sorted_list = sorted(list_)
+ ranges = []
+ last_write = -1
+ for i in range(len(sorted_list)):
+ if i+1 < len(sorted_list):
+ if sorted_list[i] == sorted_list[i+1]-1:
+ continue
+ current_range = sorted_list[last_write+1:i+1]
+ ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
+ last_write = i
+
+ return tuple(ranges)
+
+def _encode_range(start: int, end: int) -> int:
+ return (start << 32) | end
+
+def _decode_range(r: int) -> Tuple[int, int]:
+ return (r >> 32), (r & ((1 << 32) - 1))
+
+
+def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool:
+ """Determine if `int_` falls into one of the ranges in `ranges`."""
+ tuple_ = _encode_range(int_, 0)
+ pos = bisect.bisect_left(ranges, tuple_)
+    # we could be immediately ahead of a tuple (start, end)
+    # with start < int_ < end (the end bound is exclusive)
+ if pos > 0:
+ left, right = _decode_range(ranges[pos-1])
+ if left <= int_ < right:
+ return True
+ # or we could be immediately behind a tuple (int_, end)
+ if pos < len(ranges):
+ left, _ = _decode_range(ranges[pos])
+ if left == int_:
+ return True
+ return False
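Tying the helpers together: `intranges_from_list` collapses a sorted list into packed runs, and `intranges_contain` answers membership in O(log(# runs)) via `bisect`. A small usage sketch:

```python
from idna.intranges import intranges_from_list, intranges_contain

# [1, 2, 3, 10, 11] collapses into two runs: [1, 4) and [10, 12).
ranges = intranges_from_list([1, 2, 3, 10, 11])
assert len(ranges) == 2

assert intranges_contain(2, ranges)       # inside the first run
assert intranges_contain(11, ranges)      # inside the second run
assert not intranges_contain(4, ranges)   # end bounds are exclusive
```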
diff --git a/Lib/site-packages/idna/package_data.py b/Lib/site-packages/idna/package_data.py
new file mode 100644
index 0000000..c5b7220
--- /dev/null
+++ b/Lib/site-packages/idna/package_data.py
@@ -0,0 +1,2 @@
+__version__ = '3.6'
+
diff --git a/Lib/site-packages/idna/py.typed b/Lib/site-packages/idna/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/idna/uts46data.py b/Lib/site-packages/idna/uts46data.py
new file mode 100644
index 0000000..6a1eddb
--- /dev/null
+++ b/Lib/site-packages/idna/uts46data.py
@@ -0,0 +1,8598 @@
+# This file is automatically generated by tools/idna-data
+# vim: set fileencoding=utf-8 :
+
+"""IDNA Mapping Table from UTS46."""
+
+from typing import List, Tuple, Union
+
+__version__ = '15.1.0'
+def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x0, '3'),
+ (0x1, '3'),
+ (0x2, '3'),
+ (0x3, '3'),
+ (0x4, '3'),
+ (0x5, '3'),
+ (0x6, '3'),
+ (0x7, '3'),
+ (0x8, '3'),
+ (0x9, '3'),
+ (0xA, '3'),
+ (0xB, '3'),
+ (0xC, '3'),
+ (0xD, '3'),
+ (0xE, '3'),
+ (0xF, '3'),
+ (0x10, '3'),
+ (0x11, '3'),
+ (0x12, '3'),
+ (0x13, '3'),
+ (0x14, '3'),
+ (0x15, '3'),
+ (0x16, '3'),
+ (0x17, '3'),
+ (0x18, '3'),
+ (0x19, '3'),
+ (0x1A, '3'),
+ (0x1B, '3'),
+ (0x1C, '3'),
+ (0x1D, '3'),
+ (0x1E, '3'),
+ (0x1F, '3'),
+ (0x20, '3'),
+ (0x21, '3'),
+ (0x22, '3'),
+ (0x23, '3'),
+ (0x24, '3'),
+ (0x25, '3'),
+ (0x26, '3'),
+ (0x27, '3'),
+ (0x28, '3'),
+ (0x29, '3'),
+ (0x2A, '3'),
+ (0x2B, '3'),
+ (0x2C, '3'),
+ (0x2D, 'V'),
+ (0x2E, 'V'),
+ (0x2F, '3'),
+ (0x30, 'V'),
+ (0x31, 'V'),
+ (0x32, 'V'),
+ (0x33, 'V'),
+ (0x34, 'V'),
+ (0x35, 'V'),
+ (0x36, 'V'),
+ (0x37, 'V'),
+ (0x38, 'V'),
+ (0x39, 'V'),
+ (0x3A, '3'),
+ (0x3B, '3'),
+ (0x3C, '3'),
+ (0x3D, '3'),
+ (0x3E, '3'),
+ (0x3F, '3'),
+ (0x40, '3'),
+ (0x41, 'M', 'a'),
+ (0x42, 'M', 'b'),
+ (0x43, 'M', 'c'),
+ (0x44, 'M', 'd'),
+ (0x45, 'M', 'e'),
+ (0x46, 'M', 'f'),
+ (0x47, 'M', 'g'),
+ (0x48, 'M', 'h'),
+ (0x49, 'M', 'i'),
+ (0x4A, 'M', 'j'),
+ (0x4B, 'M', 'k'),
+ (0x4C, 'M', 'l'),
+ (0x4D, 'M', 'm'),
+ (0x4E, 'M', 'n'),
+ (0x4F, 'M', 'o'),
+ (0x50, 'M', 'p'),
+ (0x51, 'M', 'q'),
+ (0x52, 'M', 'r'),
+ (0x53, 'M', 's'),
+ (0x54, 'M', 't'),
+ (0x55, 'M', 'u'),
+ (0x56, 'M', 'v'),
+ (0x57, 'M', 'w'),
+ (0x58, 'M', 'x'),
+ (0x59, 'M', 'y'),
+ (0x5A, 'M', 'z'),
+ (0x5B, '3'),
+ (0x5C, '3'),
+ (0x5D, '3'),
+ (0x5E, '3'),
+ (0x5F, '3'),
+ (0x60, '3'),
+ (0x61, 'V'),
+ (0x62, 'V'),
+ (0x63, 'V'),
+ ]
+
+def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x64, 'V'),
+ (0x65, 'V'),
+ (0x66, 'V'),
+ (0x67, 'V'),
+ (0x68, 'V'),
+ (0x69, 'V'),
+ (0x6A, 'V'),
+ (0x6B, 'V'),
+ (0x6C, 'V'),
+ (0x6D, 'V'),
+ (0x6E, 'V'),
+ (0x6F, 'V'),
+ (0x70, 'V'),
+ (0x71, 'V'),
+ (0x72, 'V'),
+ (0x73, 'V'),
+ (0x74, 'V'),
+ (0x75, 'V'),
+ (0x76, 'V'),
+ (0x77, 'V'),
+ (0x78, 'V'),
+ (0x79, 'V'),
+ (0x7A, 'V'),
+ (0x7B, '3'),
+ (0x7C, '3'),
+ (0x7D, '3'),
+ (0x7E, '3'),
+ (0x7F, '3'),
+ (0x80, 'X'),
+ (0x81, 'X'),
+ (0x82, 'X'),
+ (0x83, 'X'),
+ (0x84, 'X'),
+ (0x85, 'X'),
+ (0x86, 'X'),
+ (0x87, 'X'),
+ (0x88, 'X'),
+ (0x89, 'X'),
+ (0x8A, 'X'),
+ (0x8B, 'X'),
+ (0x8C, 'X'),
+ (0x8D, 'X'),
+ (0x8E, 'X'),
+ (0x8F, 'X'),
+ (0x90, 'X'),
+ (0x91, 'X'),
+ (0x92, 'X'),
+ (0x93, 'X'),
+ (0x94, 'X'),
+ (0x95, 'X'),
+ (0x96, 'X'),
+ (0x97, 'X'),
+ (0x98, 'X'),
+ (0x99, 'X'),
+ (0x9A, 'X'),
+ (0x9B, 'X'),
+ (0x9C, 'X'),
+ (0x9D, 'X'),
+ (0x9E, 'X'),
+ (0x9F, 'X'),
+ (0xA0, '3', ' '),
+ (0xA1, 'V'),
+ (0xA2, 'V'),
+ (0xA3, 'V'),
+ (0xA4, 'V'),
+ (0xA5, 'V'),
+ (0xA6, 'V'),
+ (0xA7, 'V'),
+ (0xA8, '3', ' ̈'),
+ (0xA9, 'V'),
+ (0xAA, 'M', 'a'),
+ (0xAB, 'V'),
+ (0xAC, 'V'),
+ (0xAD, 'I'),
+ (0xAE, 'V'),
+ (0xAF, '3', ' ̄'),
+ (0xB0, 'V'),
+ (0xB1, 'V'),
+ (0xB2, 'M', '2'),
+ (0xB3, 'M', '3'),
+ (0xB4, '3', ' ́'),
+ (0xB5, 'M', 'μ'),
+ (0xB6, 'V'),
+ (0xB7, 'V'),
+ (0xB8, '3', ' ̧'),
+ (0xB9, 'M', '1'),
+ (0xBA, 'M', 'o'),
+ (0xBB, 'V'),
+ (0xBC, 'M', '1⁄4'),
+ (0xBD, 'M', '1⁄2'),
+ (0xBE, 'M', '3⁄4'),
+ (0xBF, 'V'),
+ (0xC0, 'M', 'à'),
+ (0xC1, 'M', 'á'),
+ (0xC2, 'M', 'â'),
+ (0xC3, 'M', 'ã'),
+ (0xC4, 'M', 'ä'),
+ (0xC5, 'M', 'å'),
+ (0xC6, 'M', 'æ'),
+ (0xC7, 'M', 'ç'),
+ ]
+
+def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xC8, 'M', 'è'),
+ (0xC9, 'M', 'é'),
+ (0xCA, 'M', 'ê'),
+ (0xCB, 'M', 'ë'),
+ (0xCC, 'M', 'ì'),
+ (0xCD, 'M', 'í'),
+ (0xCE, 'M', 'î'),
+ (0xCF, 'M', 'ï'),
+ (0xD0, 'M', 'ð'),
+ (0xD1, 'M', 'ñ'),
+ (0xD2, 'M', 'ò'),
+ (0xD3, 'M', 'ó'),
+ (0xD4, 'M', 'ô'),
+ (0xD5, 'M', 'õ'),
+ (0xD6, 'M', 'ö'),
+ (0xD7, 'V'),
+ (0xD8, 'M', 'ø'),
+ (0xD9, 'M', 'ù'),
+ (0xDA, 'M', 'ú'),
+ (0xDB, 'M', 'û'),
+ (0xDC, 'M', 'ü'),
+ (0xDD, 'M', 'ý'),
+ (0xDE, 'M', 'þ'),
+ (0xDF, 'D', 'ss'),
+ (0xE0, 'V'),
+ (0xE1, 'V'),
+ (0xE2, 'V'),
+ (0xE3, 'V'),
+ (0xE4, 'V'),
+ (0xE5, 'V'),
+ (0xE6, 'V'),
+ (0xE7, 'V'),
+ (0xE8, 'V'),
+ (0xE9, 'V'),
+ (0xEA, 'V'),
+ (0xEB, 'V'),
+ (0xEC, 'V'),
+ (0xED, 'V'),
+ (0xEE, 'V'),
+ (0xEF, 'V'),
+ (0xF0, 'V'),
+ (0xF1, 'V'),
+ (0xF2, 'V'),
+ (0xF3, 'V'),
+ (0xF4, 'V'),
+ (0xF5, 'V'),
+ (0xF6, 'V'),
+ (0xF7, 'V'),
+ (0xF8, 'V'),
+ (0xF9, 'V'),
+ (0xFA, 'V'),
+ (0xFB, 'V'),
+ (0xFC, 'V'),
+ (0xFD, 'V'),
+ (0xFE, 'V'),
+ (0xFF, 'V'),
+ (0x100, 'M', 'ā'),
+ (0x101, 'V'),
+ (0x102, 'M', 'ă'),
+ (0x103, 'V'),
+ (0x104, 'M', 'ą'),
+ (0x105, 'V'),
+ (0x106, 'M', 'ć'),
+ (0x107, 'V'),
+ (0x108, 'M', 'ĉ'),
+ (0x109, 'V'),
+ (0x10A, 'M', 'ċ'),
+ (0x10B, 'V'),
+ (0x10C, 'M', 'č'),
+ (0x10D, 'V'),
+ (0x10E, 'M', 'ď'),
+ (0x10F, 'V'),
+ (0x110, 'M', 'đ'),
+ (0x111, 'V'),
+ (0x112, 'M', 'ē'),
+ (0x113, 'V'),
+ (0x114, 'M', 'ĕ'),
+ (0x115, 'V'),
+ (0x116, 'M', 'ė'),
+ (0x117, 'V'),
+ (0x118, 'M', 'ę'),
+ (0x119, 'V'),
+ (0x11A, 'M', 'ě'),
+ (0x11B, 'V'),
+ (0x11C, 'M', 'ĝ'),
+ (0x11D, 'V'),
+ (0x11E, 'M', 'ğ'),
+ (0x11F, 'V'),
+ (0x120, 'M', 'ġ'),
+ (0x121, 'V'),
+ (0x122, 'M', 'ģ'),
+ (0x123, 'V'),
+ (0x124, 'M', 'ĥ'),
+ (0x125, 'V'),
+ (0x126, 'M', 'ħ'),
+ (0x127, 'V'),
+ (0x128, 'M', 'ĩ'),
+ (0x129, 'V'),
+ (0x12A, 'M', 'ī'),
+ (0x12B, 'V'),
+ ]
+
+def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x12C, 'M', 'ĭ'),
+ (0x12D, 'V'),
+ (0x12E, 'M', 'į'),
+ (0x12F, 'V'),
+ (0x130, 'M', 'i̇'),
+ (0x131, 'V'),
+ (0x132, 'M', 'ij'),
+ (0x134, 'M', 'ĵ'),
+ (0x135, 'V'),
+ (0x136, 'M', 'ķ'),
+ (0x137, 'V'),
+ (0x139, 'M', 'ĺ'),
+ (0x13A, 'V'),
+ (0x13B, 'M', 'ļ'),
+ (0x13C, 'V'),
+ (0x13D, 'M', 'ľ'),
+ (0x13E, 'V'),
+ (0x13F, 'M', 'l·'),
+ (0x141, 'M', 'ł'),
+ (0x142, 'V'),
+ (0x143, 'M', 'ń'),
+ (0x144, 'V'),
+ (0x145, 'M', 'ņ'),
+ (0x146, 'V'),
+ (0x147, 'M', 'ň'),
+ (0x148, 'V'),
+ (0x149, 'M', 'ʼn'),
+ (0x14A, 'M', 'ŋ'),
+ (0x14B, 'V'),
+ (0x14C, 'M', 'ō'),
+ (0x14D, 'V'),
+ (0x14E, 'M', 'ŏ'),
+ (0x14F, 'V'),
+ (0x150, 'M', 'ő'),
+ (0x151, 'V'),
+ (0x152, 'M', 'œ'),
+ (0x153, 'V'),
+ (0x154, 'M', 'ŕ'),
+ (0x155, 'V'),
+ (0x156, 'M', 'ŗ'),
+ (0x157, 'V'),
+ (0x158, 'M', 'ř'),
+ (0x159, 'V'),
+ (0x15A, 'M', 'ś'),
+ (0x15B, 'V'),
+ (0x15C, 'M', 'ŝ'),
+ (0x15D, 'V'),
+ (0x15E, 'M', 'ş'),
+ (0x15F, 'V'),
+ (0x160, 'M', 'š'),
+ (0x161, 'V'),
+ (0x162, 'M', 'ţ'),
+ (0x163, 'V'),
+ (0x164, 'M', 'ť'),
+ (0x165, 'V'),
+ (0x166, 'M', 'ŧ'),
+ (0x167, 'V'),
+ (0x168, 'M', 'ũ'),
+ (0x169, 'V'),
+ (0x16A, 'M', 'ū'),
+ (0x16B, 'V'),
+ (0x16C, 'M', 'ŭ'),
+ (0x16D, 'V'),
+ (0x16E, 'M', 'ů'),
+ (0x16F, 'V'),
+ (0x170, 'M', 'ű'),
+ (0x171, 'V'),
+ (0x172, 'M', 'ų'),
+ (0x173, 'V'),
+ (0x174, 'M', 'ŵ'),
+ (0x175, 'V'),
+ (0x176, 'M', 'ŷ'),
+ (0x177, 'V'),
+ (0x178, 'M', 'ÿ'),
+ (0x179, 'M', 'ź'),
+ (0x17A, 'V'),
+ (0x17B, 'M', 'ż'),
+ (0x17C, 'V'),
+ (0x17D, 'M', 'ž'),
+ (0x17E, 'V'),
+ (0x17F, 'M', 's'),
+ (0x180, 'V'),
+ (0x181, 'M', 'ɓ'),
+ (0x182, 'M', 'ƃ'),
+ (0x183, 'V'),
+ (0x184, 'M', 'ƅ'),
+ (0x185, 'V'),
+ (0x186, 'M', 'ɔ'),
+ (0x187, 'M', 'ƈ'),
+ (0x188, 'V'),
+ (0x189, 'M', 'ɖ'),
+ (0x18A, 'M', 'ɗ'),
+ (0x18B, 'M', 'ƌ'),
+ (0x18C, 'V'),
+ (0x18E, 'M', 'ǝ'),
+ (0x18F, 'M', 'ə'),
+ (0x190, 'M', 'ɛ'),
+ (0x191, 'M', 'ƒ'),
+ (0x192, 'V'),
+ (0x193, 'M', 'ɠ'),
+ ]
+
+def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x194, 'M', 'ɣ'),
+ (0x195, 'V'),
+ (0x196, 'M', 'ɩ'),
+ (0x197, 'M', 'ɨ'),
+ (0x198, 'M', 'ƙ'),
+ (0x199, 'V'),
+ (0x19C, 'M', 'ɯ'),
+ (0x19D, 'M', 'ɲ'),
+ (0x19E, 'V'),
+ (0x19F, 'M', 'ɵ'),
+ (0x1A0, 'M', 'ơ'),
+ (0x1A1, 'V'),
+ (0x1A2, 'M', 'ƣ'),
+ (0x1A3, 'V'),
+ (0x1A4, 'M', 'ƥ'),
+ (0x1A5, 'V'),
+ (0x1A6, 'M', 'ʀ'),
+ (0x1A7, 'M', 'ƨ'),
+ (0x1A8, 'V'),
+ (0x1A9, 'M', 'ʃ'),
+ (0x1AA, 'V'),
+ (0x1AC, 'M', 'ƭ'),
+ (0x1AD, 'V'),
+ (0x1AE, 'M', 'ʈ'),
+ (0x1AF, 'M', 'ư'),
+ (0x1B0, 'V'),
+ (0x1B1, 'M', 'ʊ'),
+ (0x1B2, 'M', 'ʋ'),
+ (0x1B3, 'M', 'ƴ'),
+ (0x1B4, 'V'),
+ (0x1B5, 'M', 'ƶ'),
+ (0x1B6, 'V'),
+ (0x1B7, 'M', 'ʒ'),
+ (0x1B8, 'M', 'ƹ'),
+ (0x1B9, 'V'),
+ (0x1BC, 'M', 'ƽ'),
+ (0x1BD, 'V'),
+ (0x1C4, 'M', 'dž'),
+ (0x1C7, 'M', 'lj'),
+ (0x1CA, 'M', 'nj'),
+ (0x1CD, 'M', 'ǎ'),
+ (0x1CE, 'V'),
+ (0x1CF, 'M', 'ǐ'),
+ (0x1D0, 'V'),
+ (0x1D1, 'M', 'ǒ'),
+ (0x1D2, 'V'),
+ (0x1D3, 'M', 'ǔ'),
+ (0x1D4, 'V'),
+ (0x1D5, 'M', 'ǖ'),
+ (0x1D6, 'V'),
+ (0x1D7, 'M', 'ǘ'),
+ (0x1D8, 'V'),
+ (0x1D9, 'M', 'ǚ'),
+ (0x1DA, 'V'),
+ (0x1DB, 'M', 'ǜ'),
+ (0x1DC, 'V'),
+ (0x1DE, 'M', 'ǟ'),
+ (0x1DF, 'V'),
+ (0x1E0, 'M', 'ǡ'),
+ (0x1E1, 'V'),
+ (0x1E2, 'M', 'ǣ'),
+ (0x1E3, 'V'),
+ (0x1E4, 'M', 'ǥ'),
+ (0x1E5, 'V'),
+ (0x1E6, 'M', 'ǧ'),
+ (0x1E7, 'V'),
+ (0x1E8, 'M', 'ǩ'),
+ (0x1E9, 'V'),
+ (0x1EA, 'M', 'ǫ'),
+ (0x1EB, 'V'),
+ (0x1EC, 'M', 'ǭ'),
+ (0x1ED, 'V'),
+ (0x1EE, 'M', 'ǯ'),
+ (0x1EF, 'V'),
+ (0x1F1, 'M', 'dz'),
+ (0x1F4, 'M', 'ǵ'),
+ (0x1F5, 'V'),
+ (0x1F6, 'M', 'ƕ'),
+ (0x1F7, 'M', 'ƿ'),
+ (0x1F8, 'M', 'ǹ'),
+ (0x1F9, 'V'),
+ (0x1FA, 'M', 'ǻ'),
+ (0x1FB, 'V'),
+ (0x1FC, 'M', 'ǽ'),
+ (0x1FD, 'V'),
+ (0x1FE, 'M', 'ǿ'),
+ (0x1FF, 'V'),
+ (0x200, 'M', 'ȁ'),
+ (0x201, 'V'),
+ (0x202, 'M', 'ȃ'),
+ (0x203, 'V'),
+ (0x204, 'M', 'ȅ'),
+ (0x205, 'V'),
+ (0x206, 'M', 'ȇ'),
+ (0x207, 'V'),
+ (0x208, 'M', 'ȉ'),
+ (0x209, 'V'),
+ (0x20A, 'M', 'ȋ'),
+ (0x20B, 'V'),
+ (0x20C, 'M', 'ȍ'),
+ ]
+
+def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x20D, 'V'),
+ (0x20E, 'M', 'ȏ'),
+ (0x20F, 'V'),
+ (0x210, 'M', 'ȑ'),
+ (0x211, 'V'),
+ (0x212, 'M', 'ȓ'),
+ (0x213, 'V'),
+ (0x214, 'M', 'ȕ'),
+ (0x215, 'V'),
+ (0x216, 'M', 'ȗ'),
+ (0x217, 'V'),
+ (0x218, 'M', 'ș'),
+ (0x219, 'V'),
+ (0x21A, 'M', 'ț'),
+ (0x21B, 'V'),
+ (0x21C, 'M', 'ȝ'),
+ (0x21D, 'V'),
+ (0x21E, 'M', 'ȟ'),
+ (0x21F, 'V'),
+ (0x220, 'M', 'ƞ'),
+ (0x221, 'V'),
+ (0x222, 'M', 'ȣ'),
+ (0x223, 'V'),
+ (0x224, 'M', 'ȥ'),
+ (0x225, 'V'),
+ (0x226, 'M', 'ȧ'),
+ (0x227, 'V'),
+ (0x228, 'M', 'ȩ'),
+ (0x229, 'V'),
+ (0x22A, 'M', 'ȫ'),
+ (0x22B, 'V'),
+ (0x22C, 'M', 'ȭ'),
+ (0x22D, 'V'),
+ (0x22E, 'M', 'ȯ'),
+ (0x22F, 'V'),
+ (0x230, 'M', 'ȱ'),
+ (0x231, 'V'),
+ (0x232, 'M', 'ȳ'),
+ (0x233, 'V'),
+ (0x23A, 'M', 'ⱥ'),
+ (0x23B, 'M', 'ȼ'),
+ (0x23C, 'V'),
+ (0x23D, 'M', 'ƚ'),
+ (0x23E, 'M', 'ⱦ'),
+ (0x23F, 'V'),
+ (0x241, 'M', 'ɂ'),
+ (0x242, 'V'),
+ (0x243, 'M', 'ƀ'),
+ (0x244, 'M', 'ʉ'),
+ (0x245, 'M', 'ʌ'),
+ (0x246, 'M', 'ɇ'),
+ (0x247, 'V'),
+ (0x248, 'M', 'ɉ'),
+ (0x249, 'V'),
+ (0x24A, 'M', 'ɋ'),
+ (0x24B, 'V'),
+ (0x24C, 'M', 'ɍ'),
+ (0x24D, 'V'),
+ (0x24E, 'M', 'ɏ'),
+ (0x24F, 'V'),
+ (0x2B0, 'M', 'h'),
+ (0x2B1, 'M', 'ɦ'),
+ (0x2B2, 'M', 'j'),
+ (0x2B3, 'M', 'r'),
+ (0x2B4, 'M', 'ɹ'),
+ (0x2B5, 'M', 'ɻ'),
+ (0x2B6, 'M', 'ʁ'),
+ (0x2B7, 'M', 'w'),
+ (0x2B8, 'M', 'y'),
+ (0x2B9, 'V'),
+ (0x2D8, '3', ' ̆'),
+ (0x2D9, '3', ' ̇'),
+ (0x2DA, '3', ' ̊'),
+ (0x2DB, '3', ' ̨'),
+ (0x2DC, '3', ' ̃'),
+ (0x2DD, '3', ' ̋'),
+ (0x2DE, 'V'),
+ (0x2E0, 'M', 'ɣ'),
+ (0x2E1, 'M', 'l'),
+ (0x2E2, 'M', 's'),
+ (0x2E3, 'M', 'x'),
+ (0x2E4, 'M', 'ʕ'),
+ (0x2E5, 'V'),
+ (0x340, 'M', '̀'),
+ (0x341, 'M', '́'),
+ (0x342, 'V'),
+ (0x343, 'M', '̓'),
+ (0x344, 'M', '̈́'),
+ (0x345, 'M', 'ι'),
+ (0x346, 'V'),
+ (0x34F, 'I'),
+ (0x350, 'V'),
+ (0x370, 'M', 'ͱ'),
+ (0x371, 'V'),
+ (0x372, 'M', 'ͳ'),
+ (0x373, 'V'),
+ (0x374, 'M', 'ʹ'),
+ (0x375, 'V'),
+ (0x376, 'M', 'ͷ'),
+ (0x377, 'V'),
+ ]
+
+def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x378, 'X'),
+ (0x37A, '3', ' ι'),
+ (0x37B, 'V'),
+ (0x37E, '3', ';'),
+ (0x37F, 'M', 'ϳ'),
+ (0x380, 'X'),
+ (0x384, '3', ' ́'),
+ (0x385, '3', ' ̈́'),
+ (0x386, 'M', 'ά'),
+ (0x387, 'M', '·'),
+ (0x388, 'M', 'έ'),
+ (0x389, 'M', 'ή'),
+ (0x38A, 'M', 'ί'),
+ (0x38B, 'X'),
+ (0x38C, 'M', 'ό'),
+ (0x38D, 'X'),
+ (0x38E, 'M', 'ύ'),
+ (0x38F, 'M', 'ώ'),
+ (0x390, 'V'),
+ (0x391, 'M', 'α'),
+ (0x392, 'M', 'β'),
+ (0x393, 'M', 'γ'),
+ (0x394, 'M', 'δ'),
+ (0x395, 'M', 'ε'),
+ (0x396, 'M', 'ζ'),
+ (0x397, 'M', 'η'),
+ (0x398, 'M', 'θ'),
+ (0x399, 'M', 'ι'),
+ (0x39A, 'M', 'κ'),
+ (0x39B, 'M', 'λ'),
+ (0x39C, 'M', 'μ'),
+ (0x39D, 'M', 'ν'),
+ (0x39E, 'M', 'ξ'),
+ (0x39F, 'M', 'ο'),
+ (0x3A0, 'M', 'π'),
+ (0x3A1, 'M', 'ρ'),
+ (0x3A2, 'X'),
+ (0x3A3, 'M', 'σ'),
+ (0x3A4, 'M', 'τ'),
+ (0x3A5, 'M', 'υ'),
+ (0x3A6, 'M', 'φ'),
+ (0x3A7, 'M', 'χ'),
+ (0x3A8, 'M', 'ψ'),
+ (0x3A9, 'M', 'ω'),
+ (0x3AA, 'M', 'ϊ'),
+ (0x3AB, 'M', 'ϋ'),
+ (0x3AC, 'V'),
+ (0x3C2, 'D', 'σ'),
+ (0x3C3, 'V'),
+ (0x3CF, 'M', 'ϗ'),
+ (0x3D0, 'M', 'β'),
+ (0x3D1, 'M', 'θ'),
+ (0x3D2, 'M', 'υ'),
+ (0x3D3, 'M', 'ύ'),
+ (0x3D4, 'M', 'ϋ'),
+ (0x3D5, 'M', 'φ'),
+ (0x3D6, 'M', 'π'),
+ (0x3D7, 'V'),
+ (0x3D8, 'M', 'ϙ'),
+ (0x3D9, 'V'),
+ (0x3DA, 'M', 'ϛ'),
+ (0x3DB, 'V'),
+ (0x3DC, 'M', 'ϝ'),
+ (0x3DD, 'V'),
+ (0x3DE, 'M', 'ϟ'),
+ (0x3DF, 'V'),
+ (0x3E0, 'M', 'ϡ'),
+ (0x3E1, 'V'),
+ (0x3E2, 'M', 'ϣ'),
+ (0x3E3, 'V'),
+ (0x3E4, 'M', 'ϥ'),
+ (0x3E5, 'V'),
+ (0x3E6, 'M', 'ϧ'),
+ (0x3E7, 'V'),
+ (0x3E8, 'M', 'ϩ'),
+ (0x3E9, 'V'),
+ (0x3EA, 'M', 'ϫ'),
+ (0x3EB, 'V'),
+ (0x3EC, 'M', 'ϭ'),
+ (0x3ED, 'V'),
+ (0x3EE, 'M', 'ϯ'),
+ (0x3EF, 'V'),
+ (0x3F0, 'M', 'κ'),
+ (0x3F1, 'M', 'ρ'),
+ (0x3F2, 'M', 'σ'),
+ (0x3F3, 'V'),
+ (0x3F4, 'M', 'θ'),
+ (0x3F5, 'M', 'ε'),
+ (0x3F6, 'V'),
+ (0x3F7, 'M', 'ϸ'),
+ (0x3F8, 'V'),
+ (0x3F9, 'M', 'σ'),
+ (0x3FA, 'M', 'ϻ'),
+ (0x3FB, 'V'),
+ (0x3FD, 'M', 'ͻ'),
+ (0x3FE, 'M', 'ͼ'),
+ (0x3FF, 'M', 'ͽ'),
+ (0x400, 'M', 'ѐ'),
+ (0x401, 'M', 'ё'),
+ (0x402, 'M', 'ђ'),
+ ]
+
+def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x403, 'M', 'ѓ'),
+ (0x404, 'M', 'є'),
+ (0x405, 'M', 'ѕ'),
+ (0x406, 'M', 'і'),
+ (0x407, 'M', 'ї'),
+ (0x408, 'M', 'ј'),
+ (0x409, 'M', 'љ'),
+ (0x40A, 'M', 'њ'),
+ (0x40B, 'M', 'ћ'),
+ (0x40C, 'M', 'ќ'),
+ (0x40D, 'M', 'ѝ'),
+ (0x40E, 'M', 'ў'),
+ (0x40F, 'M', 'џ'),
+ (0x410, 'M', 'а'),
+ (0x411, 'M', 'б'),
+ (0x412, 'M', 'в'),
+ (0x413, 'M', 'г'),
+ (0x414, 'M', 'д'),
+ (0x415, 'M', 'е'),
+ (0x416, 'M', 'ж'),
+ (0x417, 'M', 'з'),
+ (0x418, 'M', 'и'),
+ (0x419, 'M', 'й'),
+ (0x41A, 'M', 'к'),
+ (0x41B, 'M', 'л'),
+ (0x41C, 'M', 'м'),
+ (0x41D, 'M', 'н'),
+ (0x41E, 'M', 'о'),
+ (0x41F, 'M', 'п'),
+ (0x420, 'M', 'р'),
+ (0x421, 'M', 'с'),
+ (0x422, 'M', 'т'),
+ (0x423, 'M', 'у'),
+ (0x424, 'M', 'ф'),
+ (0x425, 'M', 'х'),
+ (0x426, 'M', 'ц'),
+ (0x427, 'M', 'ч'),
+ (0x428, 'M', 'ш'),
+ (0x429, 'M', 'щ'),
+ (0x42A, 'M', 'ъ'),
+ (0x42B, 'M', 'ы'),
+ (0x42C, 'M', 'ь'),
+ (0x42D, 'M', 'э'),
+ (0x42E, 'M', 'ю'),
+ (0x42F, 'M', 'я'),
+ (0x430, 'V'),
+ (0x460, 'M', 'ѡ'),
+ (0x461, 'V'),
+ (0x462, 'M', 'ѣ'),
+ (0x463, 'V'),
+ (0x464, 'M', 'ѥ'),
+ (0x465, 'V'),
+ (0x466, 'M', 'ѧ'),
+ (0x467, 'V'),
+ (0x468, 'M', 'ѩ'),
+ (0x469, 'V'),
+ (0x46A, 'M', 'ѫ'),
+ (0x46B, 'V'),
+ (0x46C, 'M', 'ѭ'),
+ (0x46D, 'V'),
+ (0x46E, 'M', 'ѯ'),
+ (0x46F, 'V'),
+ (0x470, 'M', 'ѱ'),
+ (0x471, 'V'),
+ (0x472, 'M', 'ѳ'),
+ (0x473, 'V'),
+ (0x474, 'M', 'ѵ'),
+ (0x475, 'V'),
+ (0x476, 'M', 'ѷ'),
+ (0x477, 'V'),
+ (0x478, 'M', 'ѹ'),
+ (0x479, 'V'),
+ (0x47A, 'M', 'ѻ'),
+ (0x47B, 'V'),
+ (0x47C, 'M', 'ѽ'),
+ (0x47D, 'V'),
+ (0x47E, 'M', 'ѿ'),
+ (0x47F, 'V'),
+ (0x480, 'M', 'ҁ'),
+ (0x481, 'V'),
+ (0x48A, 'M', 'ҋ'),
+ (0x48B, 'V'),
+ (0x48C, 'M', 'ҍ'),
+ (0x48D, 'V'),
+ (0x48E, 'M', 'ҏ'),
+ (0x48F, 'V'),
+ (0x490, 'M', 'ґ'),
+ (0x491, 'V'),
+ (0x492, 'M', 'ғ'),
+ (0x493, 'V'),
+ (0x494, 'M', 'ҕ'),
+ (0x495, 'V'),
+ (0x496, 'M', 'җ'),
+ (0x497, 'V'),
+ (0x498, 'M', 'ҙ'),
+ (0x499, 'V'),
+ (0x49A, 'M', 'қ'),
+ (0x49B, 'V'),
+ (0x49C, 'M', 'ҝ'),
+ (0x49D, 'V'),
+ ]
+
+def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x49E, 'M', 'ҟ'),
+ (0x49F, 'V'),
+ (0x4A0, 'M', 'ҡ'),
+ (0x4A1, 'V'),
+ (0x4A2, 'M', 'ң'),
+ (0x4A3, 'V'),
+ (0x4A4, 'M', 'ҥ'),
+ (0x4A5, 'V'),
+ (0x4A6, 'M', 'ҧ'),
+ (0x4A7, 'V'),
+ (0x4A8, 'M', 'ҩ'),
+ (0x4A9, 'V'),
+ (0x4AA, 'M', 'ҫ'),
+ (0x4AB, 'V'),
+ (0x4AC, 'M', 'ҭ'),
+ (0x4AD, 'V'),
+ (0x4AE, 'M', 'ү'),
+ (0x4AF, 'V'),
+ (0x4B0, 'M', 'ұ'),
+ (0x4B1, 'V'),
+ (0x4B2, 'M', 'ҳ'),
+ (0x4B3, 'V'),
+ (0x4B4, 'M', 'ҵ'),
+ (0x4B5, 'V'),
+ (0x4B6, 'M', 'ҷ'),
+ (0x4B7, 'V'),
+ (0x4B8, 'M', 'ҹ'),
+ (0x4B9, 'V'),
+ (0x4BA, 'M', 'һ'),
+ (0x4BB, 'V'),
+ (0x4BC, 'M', 'ҽ'),
+ (0x4BD, 'V'),
+ (0x4BE, 'M', 'ҿ'),
+ (0x4BF, 'V'),
+ (0x4C0, 'X'),
+ (0x4C1, 'M', 'ӂ'),
+ (0x4C2, 'V'),
+ (0x4C3, 'M', 'ӄ'),
+ (0x4C4, 'V'),
+ (0x4C5, 'M', 'ӆ'),
+ (0x4C6, 'V'),
+ (0x4C7, 'M', 'ӈ'),
+ (0x4C8, 'V'),
+ (0x4C9, 'M', 'ӊ'),
+ (0x4CA, 'V'),
+ (0x4CB, 'M', 'ӌ'),
+ (0x4CC, 'V'),
+ (0x4CD, 'M', 'ӎ'),
+ (0x4CE, 'V'),
+ (0x4D0, 'M', 'ӑ'),
+ (0x4D1, 'V'),
+ (0x4D2, 'M', 'ӓ'),
+ (0x4D3, 'V'),
+ (0x4D4, 'M', 'ӕ'),
+ (0x4D5, 'V'),
+ (0x4D6, 'M', 'ӗ'),
+ (0x4D7, 'V'),
+ (0x4D8, 'M', 'ә'),
+ (0x4D9, 'V'),
+ (0x4DA, 'M', 'ӛ'),
+ (0x4DB, 'V'),
+ (0x4DC, 'M', 'ӝ'),
+ (0x4DD, 'V'),
+ (0x4DE, 'M', 'ӟ'),
+ (0x4DF, 'V'),
+ (0x4E0, 'M', 'ӡ'),
+ (0x4E1, 'V'),
+ (0x4E2, 'M', 'ӣ'),
+ (0x4E3, 'V'),
+ (0x4E4, 'M', 'ӥ'),
+ (0x4E5, 'V'),
+ (0x4E6, 'M', 'ӧ'),
+ (0x4E7, 'V'),
+ (0x4E8, 'M', 'ө'),
+ (0x4E9, 'V'),
+ (0x4EA, 'M', 'ӫ'),
+ (0x4EB, 'V'),
+ (0x4EC, 'M', 'ӭ'),
+ (0x4ED, 'V'),
+ (0x4EE, 'M', 'ӯ'),
+ (0x4EF, 'V'),
+ (0x4F0, 'M', 'ӱ'),
+ (0x4F1, 'V'),
+ (0x4F2, 'M', 'ӳ'),
+ (0x4F3, 'V'),
+ (0x4F4, 'M', 'ӵ'),
+ (0x4F5, 'V'),
+ (0x4F6, 'M', 'ӷ'),
+ (0x4F7, 'V'),
+ (0x4F8, 'M', 'ӹ'),
+ (0x4F9, 'V'),
+ (0x4FA, 'M', 'ӻ'),
+ (0x4FB, 'V'),
+ (0x4FC, 'M', 'ӽ'),
+ (0x4FD, 'V'),
+ (0x4FE, 'M', 'ӿ'),
+ (0x4FF, 'V'),
+ (0x500, 'M', 'ԁ'),
+ (0x501, 'V'),
+ (0x502, 'M', 'ԃ'),
+ ]
+
+def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x503, 'V'),
+ (0x504, 'M', 'ԅ'),
+ (0x505, 'V'),
+ (0x506, 'M', 'ԇ'),
+ (0x507, 'V'),
+ (0x508, 'M', 'ԉ'),
+ (0x509, 'V'),
+ (0x50A, 'M', 'ԋ'),
+ (0x50B, 'V'),
+ (0x50C, 'M', 'ԍ'),
+ (0x50D, 'V'),
+ (0x50E, 'M', 'ԏ'),
+ (0x50F, 'V'),
+ (0x510, 'M', 'ԑ'),
+ (0x511, 'V'),
+ (0x512, 'M', 'ԓ'),
+ (0x513, 'V'),
+ (0x514, 'M', 'ԕ'),
+ (0x515, 'V'),
+ (0x516, 'M', 'ԗ'),
+ (0x517, 'V'),
+ (0x518, 'M', 'ԙ'),
+ (0x519, 'V'),
+ (0x51A, 'M', 'ԛ'),
+ (0x51B, 'V'),
+ (0x51C, 'M', 'ԝ'),
+ (0x51D, 'V'),
+ (0x51E, 'M', 'ԟ'),
+ (0x51F, 'V'),
+ (0x520, 'M', 'ԡ'),
+ (0x521, 'V'),
+ (0x522, 'M', 'ԣ'),
+ (0x523, 'V'),
+ (0x524, 'M', 'ԥ'),
+ (0x525, 'V'),
+ (0x526, 'M', 'ԧ'),
+ (0x527, 'V'),
+ (0x528, 'M', 'ԩ'),
+ (0x529, 'V'),
+ (0x52A, 'M', 'ԫ'),
+ (0x52B, 'V'),
+ (0x52C, 'M', 'ԭ'),
+ (0x52D, 'V'),
+ (0x52E, 'M', 'ԯ'),
+ (0x52F, 'V'),
+ (0x530, 'X'),
+ (0x531, 'M', 'ա'),
+ (0x532, 'M', 'բ'),
+ (0x533, 'M', 'գ'),
+ (0x534, 'M', 'դ'),
+ (0x535, 'M', 'ե'),
+ (0x536, 'M', 'զ'),
+ (0x537, 'M', 'է'),
+ (0x538, 'M', 'ը'),
+ (0x539, 'M', 'թ'),
+ (0x53A, 'M', 'ժ'),
+ (0x53B, 'M', 'ի'),
+ (0x53C, 'M', 'լ'),
+ (0x53D, 'M', 'խ'),
+ (0x53E, 'M', 'ծ'),
+ (0x53F, 'M', 'կ'),
+ (0x540, 'M', 'հ'),
+ (0x541, 'M', 'ձ'),
+ (0x542, 'M', 'ղ'),
+ (0x543, 'M', 'ճ'),
+ (0x544, 'M', 'մ'),
+ (0x545, 'M', 'յ'),
+ (0x546, 'M', 'ն'),
+ (0x547, 'M', 'շ'),
+ (0x548, 'M', 'ո'),
+ (0x549, 'M', 'չ'),
+ (0x54A, 'M', 'պ'),
+ (0x54B, 'M', 'ջ'),
+ (0x54C, 'M', 'ռ'),
+ (0x54D, 'M', 'ս'),
+ (0x54E, 'M', 'վ'),
+ (0x54F, 'M', 'տ'),
+ (0x550, 'M', 'ր'),
+ (0x551, 'M', 'ց'),
+ (0x552, 'M', 'ւ'),
+ (0x553, 'M', 'փ'),
+ (0x554, 'M', 'ք'),
+ (0x555, 'M', 'օ'),
+ (0x556, 'M', 'ֆ'),
+ (0x557, 'X'),
+ (0x559, 'V'),
+ (0x587, 'M', 'եւ'),
+ (0x588, 'V'),
+ (0x58B, 'X'),
+ (0x58D, 'V'),
+ (0x590, 'X'),
+ (0x591, 'V'),
+ (0x5C8, 'X'),
+ (0x5D0, 'V'),
+ (0x5EB, 'X'),
+ (0x5EF, 'V'),
+ (0x5F5, 'X'),
+ (0x606, 'V'),
+ (0x61C, 'X'),
+ (0x61D, 'V'),
+ ]
+
+def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x675, 'M', 'اٴ'),
+ (0x676, 'M', 'وٴ'),
+ (0x677, 'M', 'ۇٴ'),
+ (0x678, 'M', 'يٴ'),
+ (0x679, 'V'),
+ (0x6DD, 'X'),
+ (0x6DE, 'V'),
+ (0x70E, 'X'),
+ (0x710, 'V'),
+ (0x74B, 'X'),
+ (0x74D, 'V'),
+ (0x7B2, 'X'),
+ (0x7C0, 'V'),
+ (0x7FB, 'X'),
+ (0x7FD, 'V'),
+ (0x82E, 'X'),
+ (0x830, 'V'),
+ (0x83F, 'X'),
+ (0x840, 'V'),
+ (0x85C, 'X'),
+ (0x85E, 'V'),
+ (0x85F, 'X'),
+ (0x860, 'V'),
+ (0x86B, 'X'),
+ (0x870, 'V'),
+ (0x88F, 'X'),
+ (0x898, 'V'),
+ (0x8E2, 'X'),
+ (0x8E3, 'V'),
+ (0x958, 'M', 'क़'),
+ (0x959, 'M', 'ख़'),
+ (0x95A, 'M', 'ग़'),
+ (0x95B, 'M', 'ज़'),
+ (0x95C, 'M', 'ड़'),
+ (0x95D, 'M', 'ढ़'),
+ (0x95E, 'M', 'फ़'),
+ (0x95F, 'M', 'य़'),
+ (0x960, 'V'),
+ (0x984, 'X'),
+ (0x985, 'V'),
+ (0x98D, 'X'),
+ (0x98F, 'V'),
+ (0x991, 'X'),
+ (0x993, 'V'),
+ (0x9A9, 'X'),
+ (0x9AA, 'V'),
+ (0x9B1, 'X'),
+ (0x9B2, 'V'),
+ (0x9B3, 'X'),
+ (0x9B6, 'V'),
+ (0x9BA, 'X'),
+ (0x9BC, 'V'),
+ (0x9C5, 'X'),
+ (0x9C7, 'V'),
+ (0x9C9, 'X'),
+ (0x9CB, 'V'),
+ (0x9CF, 'X'),
+ (0x9D7, 'V'),
+ (0x9D8, 'X'),
+ (0x9DC, 'M', 'ড়'),
+ (0x9DD, 'M', 'ঢ়'),
+ (0x9DE, 'X'),
+ (0x9DF, 'M', 'য়'),
+ (0x9E0, 'V'),
+ (0x9E4, 'X'),
+ (0x9E6, 'V'),
+ (0x9FF, 'X'),
+ (0xA01, 'V'),
+ (0xA04, 'X'),
+ (0xA05, 'V'),
+ (0xA0B, 'X'),
+ (0xA0F, 'V'),
+ (0xA11, 'X'),
+ (0xA13, 'V'),
+ (0xA29, 'X'),
+ (0xA2A, 'V'),
+ (0xA31, 'X'),
+ (0xA32, 'V'),
+ (0xA33, 'M', 'ਲ਼'),
+ (0xA34, 'X'),
+ (0xA35, 'V'),
+ (0xA36, 'M', 'ਸ਼'),
+ (0xA37, 'X'),
+ (0xA38, 'V'),
+ (0xA3A, 'X'),
+ (0xA3C, 'V'),
+ (0xA3D, 'X'),
+ (0xA3E, 'V'),
+ (0xA43, 'X'),
+ (0xA47, 'V'),
+ (0xA49, 'X'),
+ (0xA4B, 'V'),
+ (0xA4E, 'X'),
+ (0xA51, 'V'),
+ (0xA52, 'X'),
+ (0xA59, 'M', 'ਖ਼'),
+ (0xA5A, 'M', 'ਗ਼'),
+ (0xA5B, 'M', 'ਜ਼'),
+ (0xA5C, 'V'),
+ (0xA5D, 'X'),
+ ]
+
+def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xA5E, 'M', 'ਫ਼'),
+ (0xA5F, 'X'),
+ (0xA66, 'V'),
+ (0xA77, 'X'),
+ (0xA81, 'V'),
+ (0xA84, 'X'),
+ (0xA85, 'V'),
+ (0xA8E, 'X'),
+ (0xA8F, 'V'),
+ (0xA92, 'X'),
+ (0xA93, 'V'),
+ (0xAA9, 'X'),
+ (0xAAA, 'V'),
+ (0xAB1, 'X'),
+ (0xAB2, 'V'),
+ (0xAB4, 'X'),
+ (0xAB5, 'V'),
+ (0xABA, 'X'),
+ (0xABC, 'V'),
+ (0xAC6, 'X'),
+ (0xAC7, 'V'),
+ (0xACA, 'X'),
+ (0xACB, 'V'),
+ (0xACE, 'X'),
+ (0xAD0, 'V'),
+ (0xAD1, 'X'),
+ (0xAE0, 'V'),
+ (0xAE4, 'X'),
+ (0xAE6, 'V'),
+ (0xAF2, 'X'),
+ (0xAF9, 'V'),
+ (0xB00, 'X'),
+ (0xB01, 'V'),
+ (0xB04, 'X'),
+ (0xB05, 'V'),
+ (0xB0D, 'X'),
+ (0xB0F, 'V'),
+ (0xB11, 'X'),
+ (0xB13, 'V'),
+ (0xB29, 'X'),
+ (0xB2A, 'V'),
+ (0xB31, 'X'),
+ (0xB32, 'V'),
+ (0xB34, 'X'),
+ (0xB35, 'V'),
+ (0xB3A, 'X'),
+ (0xB3C, 'V'),
+ (0xB45, 'X'),
+ (0xB47, 'V'),
+ (0xB49, 'X'),
+ (0xB4B, 'V'),
+ (0xB4E, 'X'),
+ (0xB55, 'V'),
+ (0xB58, 'X'),
+ (0xB5C, 'M', 'ଡ଼'),
+ (0xB5D, 'M', 'ଢ଼'),
+ (0xB5E, 'X'),
+ (0xB5F, 'V'),
+ (0xB64, 'X'),
+ (0xB66, 'V'),
+ (0xB78, 'X'),
+ (0xB82, 'V'),
+ (0xB84, 'X'),
+ (0xB85, 'V'),
+ (0xB8B, 'X'),
+ (0xB8E, 'V'),
+ (0xB91, 'X'),
+ (0xB92, 'V'),
+ (0xB96, 'X'),
+ (0xB99, 'V'),
+ (0xB9B, 'X'),
+ (0xB9C, 'V'),
+ (0xB9D, 'X'),
+ (0xB9E, 'V'),
+ (0xBA0, 'X'),
+ (0xBA3, 'V'),
+ (0xBA5, 'X'),
+ (0xBA8, 'V'),
+ (0xBAB, 'X'),
+ (0xBAE, 'V'),
+ (0xBBA, 'X'),
+ (0xBBE, 'V'),
+ (0xBC3, 'X'),
+ (0xBC6, 'V'),
+ (0xBC9, 'X'),
+ (0xBCA, 'V'),
+ (0xBCE, 'X'),
+ (0xBD0, 'V'),
+ (0xBD1, 'X'),
+ (0xBD7, 'V'),
+ (0xBD8, 'X'),
+ (0xBE6, 'V'),
+ (0xBFB, 'X'),
+ (0xC00, 'V'),
+ (0xC0D, 'X'),
+ (0xC0E, 'V'),
+ (0xC11, 'X'),
+ (0xC12, 'V'),
+ (0xC29, 'X'),
+ (0xC2A, 'V'),
+ ]
+
+def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xC3A, 'X'),
+ (0xC3C, 'V'),
+ (0xC45, 'X'),
+ (0xC46, 'V'),
+ (0xC49, 'X'),
+ (0xC4A, 'V'),
+ (0xC4E, 'X'),
+ (0xC55, 'V'),
+ (0xC57, 'X'),
+ (0xC58, 'V'),
+ (0xC5B, 'X'),
+ (0xC5D, 'V'),
+ (0xC5E, 'X'),
+ (0xC60, 'V'),
+ (0xC64, 'X'),
+ (0xC66, 'V'),
+ (0xC70, 'X'),
+ (0xC77, 'V'),
+ (0xC8D, 'X'),
+ (0xC8E, 'V'),
+ (0xC91, 'X'),
+ (0xC92, 'V'),
+ (0xCA9, 'X'),
+ (0xCAA, 'V'),
+ (0xCB4, 'X'),
+ (0xCB5, 'V'),
+ (0xCBA, 'X'),
+ (0xCBC, 'V'),
+ (0xCC5, 'X'),
+ (0xCC6, 'V'),
+ (0xCC9, 'X'),
+ (0xCCA, 'V'),
+ (0xCCE, 'X'),
+ (0xCD5, 'V'),
+ (0xCD7, 'X'),
+ (0xCDD, 'V'),
+ (0xCDF, 'X'),
+ (0xCE0, 'V'),
+ (0xCE4, 'X'),
+ (0xCE6, 'V'),
+ (0xCF0, 'X'),
+ (0xCF1, 'V'),
+ (0xCF4, 'X'),
+ (0xD00, 'V'),
+ (0xD0D, 'X'),
+ (0xD0E, 'V'),
+ (0xD11, 'X'),
+ (0xD12, 'V'),
+ (0xD45, 'X'),
+ (0xD46, 'V'),
+ (0xD49, 'X'),
+ (0xD4A, 'V'),
+ (0xD50, 'X'),
+ (0xD54, 'V'),
+ (0xD64, 'X'),
+ (0xD66, 'V'),
+ (0xD80, 'X'),
+ (0xD81, 'V'),
+ (0xD84, 'X'),
+ (0xD85, 'V'),
+ (0xD97, 'X'),
+ (0xD9A, 'V'),
+ (0xDB2, 'X'),
+ (0xDB3, 'V'),
+ (0xDBC, 'X'),
+ (0xDBD, 'V'),
+ (0xDBE, 'X'),
+ (0xDC0, 'V'),
+ (0xDC7, 'X'),
+ (0xDCA, 'V'),
+ (0xDCB, 'X'),
+ (0xDCF, 'V'),
+ (0xDD5, 'X'),
+ (0xDD6, 'V'),
+ (0xDD7, 'X'),
+ (0xDD8, 'V'),
+ (0xDE0, 'X'),
+ (0xDE6, 'V'),
+ (0xDF0, 'X'),
+ (0xDF2, 'V'),
+ (0xDF5, 'X'),
+ (0xE01, 'V'),
+ (0xE33, 'M', 'ํา'),
+ (0xE34, 'V'),
+ (0xE3B, 'X'),
+ (0xE3F, 'V'),
+ (0xE5C, 'X'),
+ (0xE81, 'V'),
+ (0xE83, 'X'),
+ (0xE84, 'V'),
+ (0xE85, 'X'),
+ (0xE86, 'V'),
+ (0xE8B, 'X'),
+ (0xE8C, 'V'),
+ (0xEA4, 'X'),
+ (0xEA5, 'V'),
+ (0xEA6, 'X'),
+ (0xEA7, 'V'),
+ (0xEB3, 'M', 'ໍາ'),
+ (0xEB4, 'V'),
+ ]
+
+def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xEBE, 'X'),
+ (0xEC0, 'V'),
+ (0xEC5, 'X'),
+ (0xEC6, 'V'),
+ (0xEC7, 'X'),
+ (0xEC8, 'V'),
+ (0xECF, 'X'),
+ (0xED0, 'V'),
+ (0xEDA, 'X'),
+ (0xEDC, 'M', 'ຫນ'),
+ (0xEDD, 'M', 'ຫມ'),
+ (0xEDE, 'V'),
+ (0xEE0, 'X'),
+ (0xF00, 'V'),
+ (0xF0C, 'M', '་'),
+ (0xF0D, 'V'),
+ (0xF43, 'M', 'གྷ'),
+ (0xF44, 'V'),
+ (0xF48, 'X'),
+ (0xF49, 'V'),
+ (0xF4D, 'M', 'ཌྷ'),
+ (0xF4E, 'V'),
+ (0xF52, 'M', 'དྷ'),
+ (0xF53, 'V'),
+ (0xF57, 'M', 'བྷ'),
+ (0xF58, 'V'),
+ (0xF5C, 'M', 'ཛྷ'),
+ (0xF5D, 'V'),
+ (0xF69, 'M', 'ཀྵ'),
+ (0xF6A, 'V'),
+ (0xF6D, 'X'),
+ (0xF71, 'V'),
+ (0xF73, 'M', 'ཱི'),
+ (0xF74, 'V'),
+ (0xF75, 'M', 'ཱུ'),
+ (0xF76, 'M', 'ྲྀ'),
+ (0xF77, 'M', 'ྲཱྀ'),
+ (0xF78, 'M', 'ླྀ'),
+ (0xF79, 'M', 'ླཱྀ'),
+ (0xF7A, 'V'),
+ (0xF81, 'M', 'ཱྀ'),
+ (0xF82, 'V'),
+ (0xF93, 'M', 'ྒྷ'),
+ (0xF94, 'V'),
+ (0xF98, 'X'),
+ (0xF99, 'V'),
+ (0xF9D, 'M', 'ྜྷ'),
+ (0xF9E, 'V'),
+ (0xFA2, 'M', 'ྡྷ'),
+ (0xFA3, 'V'),
+ (0xFA7, 'M', 'ྦྷ'),
+ (0xFA8, 'V'),
+ (0xFAC, 'M', 'ྫྷ'),
+ (0xFAD, 'V'),
+ (0xFB9, 'M', 'ྐྵ'),
+ (0xFBA, 'V'),
+ (0xFBD, 'X'),
+ (0xFBE, 'V'),
+ (0xFCD, 'X'),
+ (0xFCE, 'V'),
+ (0xFDB, 'X'),
+ (0x1000, 'V'),
+ (0x10A0, 'X'),
+ (0x10C7, 'M', 'ⴧ'),
+ (0x10C8, 'X'),
+ (0x10CD, 'M', 'ⴭ'),
+ (0x10CE, 'X'),
+ (0x10D0, 'V'),
+ (0x10FC, 'M', 'ნ'),
+ (0x10FD, 'V'),
+ (0x115F, 'X'),
+ (0x1161, 'V'),
+ (0x1249, 'X'),
+ (0x124A, 'V'),
+ (0x124E, 'X'),
+ (0x1250, 'V'),
+ (0x1257, 'X'),
+ (0x1258, 'V'),
+ (0x1259, 'X'),
+ (0x125A, 'V'),
+ (0x125E, 'X'),
+ (0x1260, 'V'),
+ (0x1289, 'X'),
+ (0x128A, 'V'),
+ (0x128E, 'X'),
+ (0x1290, 'V'),
+ (0x12B1, 'X'),
+ (0x12B2, 'V'),
+ (0x12B6, 'X'),
+ (0x12B8, 'V'),
+ (0x12BF, 'X'),
+ (0x12C0, 'V'),
+ (0x12C1, 'X'),
+ (0x12C2, 'V'),
+ (0x12C6, 'X'),
+ (0x12C8, 'V'),
+ (0x12D7, 'X'),
+ (0x12D8, 'V'),
+ (0x1311, 'X'),
+ (0x1312, 'V'),
+ ]
+
+def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1316, 'X'),
+ (0x1318, 'V'),
+ (0x135B, 'X'),
+ (0x135D, 'V'),
+ (0x137D, 'X'),
+ (0x1380, 'V'),
+ (0x139A, 'X'),
+ (0x13A0, 'V'),
+ (0x13F6, 'X'),
+ (0x13F8, 'M', 'Ᏸ'),
+ (0x13F9, 'M', 'Ᏹ'),
+ (0x13FA, 'M', 'Ᏺ'),
+ (0x13FB, 'M', 'Ᏻ'),
+ (0x13FC, 'M', 'Ᏼ'),
+ (0x13FD, 'M', 'Ᏽ'),
+ (0x13FE, 'X'),
+ (0x1400, 'V'),
+ (0x1680, 'X'),
+ (0x1681, 'V'),
+ (0x169D, 'X'),
+ (0x16A0, 'V'),
+ (0x16F9, 'X'),
+ (0x1700, 'V'),
+ (0x1716, 'X'),
+ (0x171F, 'V'),
+ (0x1737, 'X'),
+ (0x1740, 'V'),
+ (0x1754, 'X'),
+ (0x1760, 'V'),
+ (0x176D, 'X'),
+ (0x176E, 'V'),
+ (0x1771, 'X'),
+ (0x1772, 'V'),
+ (0x1774, 'X'),
+ (0x1780, 'V'),
+ (0x17B4, 'X'),
+ (0x17B6, 'V'),
+ (0x17DE, 'X'),
+ (0x17E0, 'V'),
+ (0x17EA, 'X'),
+ (0x17F0, 'V'),
+ (0x17FA, 'X'),
+ (0x1800, 'V'),
+ (0x1806, 'X'),
+ (0x1807, 'V'),
+ (0x180B, 'I'),
+ (0x180E, 'X'),
+ (0x180F, 'I'),
+ (0x1810, 'V'),
+ (0x181A, 'X'),
+ (0x1820, 'V'),
+ (0x1879, 'X'),
+ (0x1880, 'V'),
+ (0x18AB, 'X'),
+ (0x18B0, 'V'),
+ (0x18F6, 'X'),
+ (0x1900, 'V'),
+ (0x191F, 'X'),
+ (0x1920, 'V'),
+ (0x192C, 'X'),
+ (0x1930, 'V'),
+ (0x193C, 'X'),
+ (0x1940, 'V'),
+ (0x1941, 'X'),
+ (0x1944, 'V'),
+ (0x196E, 'X'),
+ (0x1970, 'V'),
+ (0x1975, 'X'),
+ (0x1980, 'V'),
+ (0x19AC, 'X'),
+ (0x19B0, 'V'),
+ (0x19CA, 'X'),
+ (0x19D0, 'V'),
+ (0x19DB, 'X'),
+ (0x19DE, 'V'),
+ (0x1A1C, 'X'),
+ (0x1A1E, 'V'),
+ (0x1A5F, 'X'),
+ (0x1A60, 'V'),
+ (0x1A7D, 'X'),
+ (0x1A7F, 'V'),
+ (0x1A8A, 'X'),
+ (0x1A90, 'V'),
+ (0x1A9A, 'X'),
+ (0x1AA0, 'V'),
+ (0x1AAE, 'X'),
+ (0x1AB0, 'V'),
+ (0x1ACF, 'X'),
+ (0x1B00, 'V'),
+ (0x1B4D, 'X'),
+ (0x1B50, 'V'),
+ (0x1B7F, 'X'),
+ (0x1B80, 'V'),
+ (0x1BF4, 'X'),
+ (0x1BFC, 'V'),
+ (0x1C38, 'X'),
+ (0x1C3B, 'V'),
+ (0x1C4A, 'X'),
+ (0x1C4D, 'V'),
+ (0x1C80, 'M', 'в'),
+ ]
+
+def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1C81, 'M', 'д'),
+ (0x1C82, 'M', 'о'),
+ (0x1C83, 'M', 'с'),
+ (0x1C84, 'M', 'т'),
+ (0x1C86, 'M', 'ъ'),
+ (0x1C87, 'M', 'ѣ'),
+ (0x1C88, 'M', 'ꙋ'),
+ (0x1C89, 'X'),
+ (0x1C90, 'M', 'ა'),
+ (0x1C91, 'M', 'ბ'),
+ (0x1C92, 'M', 'გ'),
+ (0x1C93, 'M', 'დ'),
+ (0x1C94, 'M', 'ე'),
+ (0x1C95, 'M', 'ვ'),
+ (0x1C96, 'M', 'ზ'),
+ (0x1C97, 'M', 'თ'),
+ (0x1C98, 'M', 'ი'),
+ (0x1C99, 'M', 'კ'),
+ (0x1C9A, 'M', 'ლ'),
+ (0x1C9B, 'M', 'მ'),
+ (0x1C9C, 'M', 'ნ'),
+ (0x1C9D, 'M', 'ო'),
+ (0x1C9E, 'M', 'პ'),
+ (0x1C9F, 'M', 'ჟ'),
+ (0x1CA0, 'M', 'რ'),
+ (0x1CA1, 'M', 'ს'),
+ (0x1CA2, 'M', 'ტ'),
+ (0x1CA3, 'M', 'უ'),
+ (0x1CA4, 'M', 'ფ'),
+ (0x1CA5, 'M', 'ქ'),
+ (0x1CA6, 'M', 'ღ'),
+ (0x1CA7, 'M', 'ყ'),
+ (0x1CA8, 'M', 'შ'),
+ (0x1CA9, 'M', 'ჩ'),
+ (0x1CAA, 'M', 'ც'),
+ (0x1CAB, 'M', 'ძ'),
+ (0x1CAC, 'M', 'წ'),
+ (0x1CAD, 'M', 'ჭ'),
+ (0x1CAE, 'M', 'ხ'),
+ (0x1CAF, 'M', 'ჯ'),
+ (0x1CB0, 'M', 'ჰ'),
+ (0x1CB1, 'M', 'ჱ'),
+ (0x1CB2, 'M', 'ჲ'),
+ (0x1CB3, 'M', 'ჳ'),
+ (0x1CB4, 'M', 'ჴ'),
+ (0x1CB5, 'M', 'ჵ'),
+ (0x1CB6, 'M', 'ჶ'),
+ (0x1CB7, 'M', 'ჷ'),
+ (0x1CB8, 'M', 'ჸ'),
+ (0x1CB9, 'M', 'ჹ'),
+ (0x1CBA, 'M', 'ჺ'),
+ (0x1CBB, 'X'),
+ (0x1CBD, 'M', 'ჽ'),
+ (0x1CBE, 'M', 'ჾ'),
+ (0x1CBF, 'M', 'ჿ'),
+ (0x1CC0, 'V'),
+ (0x1CC8, 'X'),
+ (0x1CD0, 'V'),
+ (0x1CFB, 'X'),
+ (0x1D00, 'V'),
+ (0x1D2C, 'M', 'a'),
+ (0x1D2D, 'M', 'æ'),
+ (0x1D2E, 'M', 'b'),
+ (0x1D2F, 'V'),
+ (0x1D30, 'M', 'd'),
+ (0x1D31, 'M', 'e'),
+ (0x1D32, 'M', 'ǝ'),
+ (0x1D33, 'M', 'g'),
+ (0x1D34, 'M', 'h'),
+ (0x1D35, 'M', 'i'),
+ (0x1D36, 'M', 'j'),
+ (0x1D37, 'M', 'k'),
+ (0x1D38, 'M', 'l'),
+ (0x1D39, 'M', 'm'),
+ (0x1D3A, 'M', 'n'),
+ (0x1D3B, 'V'),
+ (0x1D3C, 'M', 'o'),
+ (0x1D3D, 'M', 'ȣ'),
+ (0x1D3E, 'M', 'p'),
+ (0x1D3F, 'M', 'r'),
+ (0x1D40, 'M', 't'),
+ (0x1D41, 'M', 'u'),
+ (0x1D42, 'M', 'w'),
+ (0x1D43, 'M', 'a'),
+ (0x1D44, 'M', 'ɐ'),
+ (0x1D45, 'M', 'ɑ'),
+ (0x1D46, 'M', 'ᴂ'),
+ (0x1D47, 'M', 'b'),
+ (0x1D48, 'M', 'd'),
+ (0x1D49, 'M', 'e'),
+ (0x1D4A, 'M', 'ə'),
+ (0x1D4B, 'M', 'ɛ'),
+ (0x1D4C, 'M', 'ɜ'),
+ (0x1D4D, 'M', 'g'),
+ (0x1D4E, 'V'),
+ (0x1D4F, 'M', 'k'),
+ (0x1D50, 'M', 'm'),
+ (0x1D51, 'M', 'ŋ'),
+ (0x1D52, 'M', 'o'),
+ (0x1D53, 'M', 'ɔ'),
+ ]
+
+def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D54, 'M', 'ᴖ'),
+ (0x1D55, 'M', 'ᴗ'),
+ (0x1D56, 'M', 'p'),
+ (0x1D57, 'M', 't'),
+ (0x1D58, 'M', 'u'),
+ (0x1D59, 'M', 'ᴝ'),
+ (0x1D5A, 'M', 'ɯ'),
+ (0x1D5B, 'M', 'v'),
+ (0x1D5C, 'M', 'ᴥ'),
+ (0x1D5D, 'M', 'β'),
+ (0x1D5E, 'M', 'γ'),
+ (0x1D5F, 'M', 'δ'),
+ (0x1D60, 'M', 'φ'),
+ (0x1D61, 'M', 'χ'),
+ (0x1D62, 'M', 'i'),
+ (0x1D63, 'M', 'r'),
+ (0x1D64, 'M', 'u'),
+ (0x1D65, 'M', 'v'),
+ (0x1D66, 'M', 'β'),
+ (0x1D67, 'M', 'γ'),
+ (0x1D68, 'M', 'ρ'),
+ (0x1D69, 'M', 'φ'),
+ (0x1D6A, 'M', 'χ'),
+ (0x1D6B, 'V'),
+ (0x1D78, 'M', 'н'),
+ (0x1D79, 'V'),
+ (0x1D9B, 'M', 'ɒ'),
+ (0x1D9C, 'M', 'c'),
+ (0x1D9D, 'M', 'ɕ'),
+ (0x1D9E, 'M', 'ð'),
+ (0x1D9F, 'M', 'ɜ'),
+ (0x1DA0, 'M', 'f'),
+ (0x1DA1, 'M', 'ɟ'),
+ (0x1DA2, 'M', 'ɡ'),
+ (0x1DA3, 'M', 'ɥ'),
+ (0x1DA4, 'M', 'ɨ'),
+ (0x1DA5, 'M', 'ɩ'),
+ (0x1DA6, 'M', 'ɪ'),
+ (0x1DA7, 'M', 'ᵻ'),
+ (0x1DA8, 'M', 'ʝ'),
+ (0x1DA9, 'M', 'ɭ'),
+ (0x1DAA, 'M', 'ᶅ'),
+ (0x1DAB, 'M', 'ʟ'),
+ (0x1DAC, 'M', 'ɱ'),
+ (0x1DAD, 'M', 'ɰ'),
+ (0x1DAE, 'M', 'ɲ'),
+ (0x1DAF, 'M', 'ɳ'),
+ (0x1DB0, 'M', 'ɴ'),
+ (0x1DB1, 'M', 'ɵ'),
+ (0x1DB2, 'M', 'ɸ'),
+ (0x1DB3, 'M', 'ʂ'),
+ (0x1DB4, 'M', 'ʃ'),
+ (0x1DB5, 'M', 'ƫ'),
+ (0x1DB6, 'M', 'ʉ'),
+ (0x1DB7, 'M', 'ʊ'),
+ (0x1DB8, 'M', 'ᴜ'),
+ (0x1DB9, 'M', 'ʋ'),
+ (0x1DBA, 'M', 'ʌ'),
+ (0x1DBB, 'M', 'z'),
+ (0x1DBC, 'M', 'ʐ'),
+ (0x1DBD, 'M', 'ʑ'),
+ (0x1DBE, 'M', 'ʒ'),
+ (0x1DBF, 'M', 'θ'),
+ (0x1DC0, 'V'),
+ (0x1E00, 'M', 'ḁ'),
+ (0x1E01, 'V'),
+ (0x1E02, 'M', 'ḃ'),
+ (0x1E03, 'V'),
+ (0x1E04, 'M', 'ḅ'),
+ (0x1E05, 'V'),
+ (0x1E06, 'M', 'ḇ'),
+ (0x1E07, 'V'),
+ (0x1E08, 'M', 'ḉ'),
+ (0x1E09, 'V'),
+ (0x1E0A, 'M', 'ḋ'),
+ (0x1E0B, 'V'),
+ (0x1E0C, 'M', 'ḍ'),
+ (0x1E0D, 'V'),
+ (0x1E0E, 'M', 'ḏ'),
+ (0x1E0F, 'V'),
+ (0x1E10, 'M', 'ḑ'),
+ (0x1E11, 'V'),
+ (0x1E12, 'M', 'ḓ'),
+ (0x1E13, 'V'),
+ (0x1E14, 'M', 'ḕ'),
+ (0x1E15, 'V'),
+ (0x1E16, 'M', 'ḗ'),
+ (0x1E17, 'V'),
+ (0x1E18, 'M', 'ḙ'),
+ (0x1E19, 'V'),
+ (0x1E1A, 'M', 'ḛ'),
+ (0x1E1B, 'V'),
+ (0x1E1C, 'M', 'ḝ'),
+ (0x1E1D, 'V'),
+ (0x1E1E, 'M', 'ḟ'),
+ (0x1E1F, 'V'),
+ (0x1E20, 'M', 'ḡ'),
+ (0x1E21, 'V'),
+ (0x1E22, 'M', 'ḣ'),
+ (0x1E23, 'V'),
+ ]
+
+def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1E24, 'M', 'ḥ'),
+ (0x1E25, 'V'),
+ (0x1E26, 'M', 'ḧ'),
+ (0x1E27, 'V'),
+ (0x1E28, 'M', 'ḩ'),
+ (0x1E29, 'V'),
+ (0x1E2A, 'M', 'ḫ'),
+ (0x1E2B, 'V'),
+ (0x1E2C, 'M', 'ḭ'),
+ (0x1E2D, 'V'),
+ (0x1E2E, 'M', 'ḯ'),
+ (0x1E2F, 'V'),
+ (0x1E30, 'M', 'ḱ'),
+ (0x1E31, 'V'),
+ (0x1E32, 'M', 'ḳ'),
+ (0x1E33, 'V'),
+ (0x1E34, 'M', 'ḵ'),
+ (0x1E35, 'V'),
+ (0x1E36, 'M', 'ḷ'),
+ (0x1E37, 'V'),
+ (0x1E38, 'M', 'ḹ'),
+ (0x1E39, 'V'),
+ (0x1E3A, 'M', 'ḻ'),
+ (0x1E3B, 'V'),
+ (0x1E3C, 'M', 'ḽ'),
+ (0x1E3D, 'V'),
+ (0x1E3E, 'M', 'ḿ'),
+ (0x1E3F, 'V'),
+ (0x1E40, 'M', 'ṁ'),
+ (0x1E41, 'V'),
+ (0x1E42, 'M', 'ṃ'),
+ (0x1E43, 'V'),
+ (0x1E44, 'M', 'ṅ'),
+ (0x1E45, 'V'),
+ (0x1E46, 'M', 'ṇ'),
+ (0x1E47, 'V'),
+ (0x1E48, 'M', 'ṉ'),
+ (0x1E49, 'V'),
+ (0x1E4A, 'M', 'ṋ'),
+ (0x1E4B, 'V'),
+ (0x1E4C, 'M', 'ṍ'),
+ (0x1E4D, 'V'),
+ (0x1E4E, 'M', 'ṏ'),
+ (0x1E4F, 'V'),
+ (0x1E50, 'M', 'ṑ'),
+ (0x1E51, 'V'),
+ (0x1E52, 'M', 'ṓ'),
+ (0x1E53, 'V'),
+ (0x1E54, 'M', 'ṕ'),
+ (0x1E55, 'V'),
+ (0x1E56, 'M', 'ṗ'),
+ (0x1E57, 'V'),
+ (0x1E58, 'M', 'ṙ'),
+ (0x1E59, 'V'),
+ (0x1E5A, 'M', 'ṛ'),
+ (0x1E5B, 'V'),
+ (0x1E5C, 'M', 'ṝ'),
+ (0x1E5D, 'V'),
+ (0x1E5E, 'M', 'ṟ'),
+ (0x1E5F, 'V'),
+ (0x1E60, 'M', 'ṡ'),
+ (0x1E61, 'V'),
+ (0x1E62, 'M', 'ṣ'),
+ (0x1E63, 'V'),
+ (0x1E64, 'M', 'ṥ'),
+ (0x1E65, 'V'),
+ (0x1E66, 'M', 'ṧ'),
+ (0x1E67, 'V'),
+ (0x1E68, 'M', 'ṩ'),
+ (0x1E69, 'V'),
+ (0x1E6A, 'M', 'ṫ'),
+ (0x1E6B, 'V'),
+ (0x1E6C, 'M', 'ṭ'),
+ (0x1E6D, 'V'),
+ (0x1E6E, 'M', 'ṯ'),
+ (0x1E6F, 'V'),
+ (0x1E70, 'M', 'ṱ'),
+ (0x1E71, 'V'),
+ (0x1E72, 'M', 'ṳ'),
+ (0x1E73, 'V'),
+ (0x1E74, 'M', 'ṵ'),
+ (0x1E75, 'V'),
+ (0x1E76, 'M', 'ṷ'),
+ (0x1E77, 'V'),
+ (0x1E78, 'M', 'ṹ'),
+ (0x1E79, 'V'),
+ (0x1E7A, 'M', 'ṻ'),
+ (0x1E7B, 'V'),
+ (0x1E7C, 'M', 'ṽ'),
+ (0x1E7D, 'V'),
+ (0x1E7E, 'M', 'ṿ'),
+ (0x1E7F, 'V'),
+ (0x1E80, 'M', 'ẁ'),
+ (0x1E81, 'V'),
+ (0x1E82, 'M', 'ẃ'),
+ (0x1E83, 'V'),
+ (0x1E84, 'M', 'ẅ'),
+ (0x1E85, 'V'),
+ (0x1E86, 'M', 'ẇ'),
+ (0x1E87, 'V'),
+ ]
+
+def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1E88, 'M', 'ẉ'),
+ (0x1E89, 'V'),
+ (0x1E8A, 'M', 'ẋ'),
+ (0x1E8B, 'V'),
+ (0x1E8C, 'M', 'ẍ'),
+ (0x1E8D, 'V'),
+ (0x1E8E, 'M', 'ẏ'),
+ (0x1E8F, 'V'),
+ (0x1E90, 'M', 'ẑ'),
+ (0x1E91, 'V'),
+ (0x1E92, 'M', 'ẓ'),
+ (0x1E93, 'V'),
+ (0x1E94, 'M', 'ẕ'),
+ (0x1E95, 'V'),
+ (0x1E9A, 'M', 'aʾ'),
+ (0x1E9B, 'M', 'ṡ'),
+ (0x1E9C, 'V'),
+ (0x1E9E, 'M', 'ß'),
+ (0x1E9F, 'V'),
+ (0x1EA0, 'M', 'ạ'),
+ (0x1EA1, 'V'),
+ (0x1EA2, 'M', 'ả'),
+ (0x1EA3, 'V'),
+ (0x1EA4, 'M', 'ấ'),
+ (0x1EA5, 'V'),
+ (0x1EA6, 'M', 'ầ'),
+ (0x1EA7, 'V'),
+ (0x1EA8, 'M', 'ẩ'),
+ (0x1EA9, 'V'),
+ (0x1EAA, 'M', 'ẫ'),
+ (0x1EAB, 'V'),
+ (0x1EAC, 'M', 'ậ'),
+ (0x1EAD, 'V'),
+ (0x1EAE, 'M', 'ắ'),
+ (0x1EAF, 'V'),
+ (0x1EB0, 'M', 'ằ'),
+ (0x1EB1, 'V'),
+ (0x1EB2, 'M', 'ẳ'),
+ (0x1EB3, 'V'),
+ (0x1EB4, 'M', 'ẵ'),
+ (0x1EB5, 'V'),
+ (0x1EB6, 'M', 'ặ'),
+ (0x1EB7, 'V'),
+ (0x1EB8, 'M', 'ẹ'),
+ (0x1EB9, 'V'),
+ (0x1EBA, 'M', 'ẻ'),
+ (0x1EBB, 'V'),
+ (0x1EBC, 'M', 'ẽ'),
+ (0x1EBD, 'V'),
+ (0x1EBE, 'M', 'ế'),
+ (0x1EBF, 'V'),
+ (0x1EC0, 'M', 'ề'),
+ (0x1EC1, 'V'),
+ (0x1EC2, 'M', 'ể'),
+ (0x1EC3, 'V'),
+ (0x1EC4, 'M', 'ễ'),
+ (0x1EC5, 'V'),
+ (0x1EC6, 'M', 'ệ'),
+ (0x1EC7, 'V'),
+ (0x1EC8, 'M', 'ỉ'),
+ (0x1EC9, 'V'),
+ (0x1ECA, 'M', 'ị'),
+ (0x1ECB, 'V'),
+ (0x1ECC, 'M', 'ọ'),
+ (0x1ECD, 'V'),
+ (0x1ECE, 'M', 'ỏ'),
+ (0x1ECF, 'V'),
+ (0x1ED0, 'M', 'ố'),
+ (0x1ED1, 'V'),
+ (0x1ED2, 'M', 'ồ'),
+ (0x1ED3, 'V'),
+ (0x1ED4, 'M', 'ổ'),
+ (0x1ED5, 'V'),
+ (0x1ED6, 'M', 'ỗ'),
+ (0x1ED7, 'V'),
+ (0x1ED8, 'M', 'ộ'),
+ (0x1ED9, 'V'),
+ (0x1EDA, 'M', 'ớ'),
+ (0x1EDB, 'V'),
+ (0x1EDC, 'M', 'ờ'),
+ (0x1EDD, 'V'),
+ (0x1EDE, 'M', 'ở'),
+ (0x1EDF, 'V'),
+ (0x1EE0, 'M', 'ỡ'),
+ (0x1EE1, 'V'),
+ (0x1EE2, 'M', 'ợ'),
+ (0x1EE3, 'V'),
+ (0x1EE4, 'M', 'ụ'),
+ (0x1EE5, 'V'),
+ (0x1EE6, 'M', 'ủ'),
+ (0x1EE7, 'V'),
+ (0x1EE8, 'M', 'ứ'),
+ (0x1EE9, 'V'),
+ (0x1EEA, 'M', 'ừ'),
+ (0x1EEB, 'V'),
+ (0x1EEC, 'M', 'ử'),
+ (0x1EED, 'V'),
+ (0x1EEE, 'M', 'ữ'),
+ (0x1EEF, 'V'),
+ (0x1EF0, 'M', 'ự'),
+ ]
+
+def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1EF1, 'V'),
+ (0x1EF2, 'M', 'ỳ'),
+ (0x1EF3, 'V'),
+ (0x1EF4, 'M', 'ỵ'),
+ (0x1EF5, 'V'),
+ (0x1EF6, 'M', 'ỷ'),
+ (0x1EF7, 'V'),
+ (0x1EF8, 'M', 'ỹ'),
+ (0x1EF9, 'V'),
+ (0x1EFA, 'M', 'ỻ'),
+ (0x1EFB, 'V'),
+ (0x1EFC, 'M', 'ỽ'),
+ (0x1EFD, 'V'),
+ (0x1EFE, 'M', 'ỿ'),
+ (0x1EFF, 'V'),
+ (0x1F08, 'M', 'ἀ'),
+ (0x1F09, 'M', 'ἁ'),
+ (0x1F0A, 'M', 'ἂ'),
+ (0x1F0B, 'M', 'ἃ'),
+ (0x1F0C, 'M', 'ἄ'),
+ (0x1F0D, 'M', 'ἅ'),
+ (0x1F0E, 'M', 'ἆ'),
+ (0x1F0F, 'M', 'ἇ'),
+ (0x1F10, 'V'),
+ (0x1F16, 'X'),
+ (0x1F18, 'M', 'ἐ'),
+ (0x1F19, 'M', 'ἑ'),
+ (0x1F1A, 'M', 'ἒ'),
+ (0x1F1B, 'M', 'ἓ'),
+ (0x1F1C, 'M', 'ἔ'),
+ (0x1F1D, 'M', 'ἕ'),
+ (0x1F1E, 'X'),
+ (0x1F20, 'V'),
+ (0x1F28, 'M', 'ἠ'),
+ (0x1F29, 'M', 'ἡ'),
+ (0x1F2A, 'M', 'ἢ'),
+ (0x1F2B, 'M', 'ἣ'),
+ (0x1F2C, 'M', 'ἤ'),
+ (0x1F2D, 'M', 'ἥ'),
+ (0x1F2E, 'M', 'ἦ'),
+ (0x1F2F, 'M', 'ἧ'),
+ (0x1F30, 'V'),
+ (0x1F38, 'M', 'ἰ'),
+ (0x1F39, 'M', 'ἱ'),
+ (0x1F3A, 'M', 'ἲ'),
+ (0x1F3B, 'M', 'ἳ'),
+ (0x1F3C, 'M', 'ἴ'),
+ (0x1F3D, 'M', 'ἵ'),
+ (0x1F3E, 'M', 'ἶ'),
+ (0x1F3F, 'M', 'ἷ'),
+ (0x1F40, 'V'),
+ (0x1F46, 'X'),
+ (0x1F48, 'M', 'ὀ'),
+ (0x1F49, 'M', 'ὁ'),
+ (0x1F4A, 'M', 'ὂ'),
+ (0x1F4B, 'M', 'ὃ'),
+ (0x1F4C, 'M', 'ὄ'),
+ (0x1F4D, 'M', 'ὅ'),
+ (0x1F4E, 'X'),
+ (0x1F50, 'V'),
+ (0x1F58, 'X'),
+ (0x1F59, 'M', 'ὑ'),
+ (0x1F5A, 'X'),
+ (0x1F5B, 'M', 'ὓ'),
+ (0x1F5C, 'X'),
+ (0x1F5D, 'M', 'ὕ'),
+ (0x1F5E, 'X'),
+ (0x1F5F, 'M', 'ὗ'),
+ (0x1F60, 'V'),
+ (0x1F68, 'M', 'ὠ'),
+ (0x1F69, 'M', 'ὡ'),
+ (0x1F6A, 'M', 'ὢ'),
+ (0x1F6B, 'M', 'ὣ'),
+ (0x1F6C, 'M', 'ὤ'),
+ (0x1F6D, 'M', 'ὥ'),
+ (0x1F6E, 'M', 'ὦ'),
+ (0x1F6F, 'M', 'ὧ'),
+ (0x1F70, 'V'),
+ (0x1F71, 'M', 'ά'),
+ (0x1F72, 'V'),
+ (0x1F73, 'M', 'έ'),
+ (0x1F74, 'V'),
+ (0x1F75, 'M', 'ή'),
+ (0x1F76, 'V'),
+ (0x1F77, 'M', 'ί'),
+ (0x1F78, 'V'),
+ (0x1F79, 'M', 'ό'),
+ (0x1F7A, 'V'),
+ (0x1F7B, 'M', 'ύ'),
+ (0x1F7C, 'V'),
+ (0x1F7D, 'M', 'ώ'),
+ (0x1F7E, 'X'),
+ (0x1F80, 'M', 'ἀι'),
+ (0x1F81, 'M', 'ἁι'),
+ (0x1F82, 'M', 'ἂι'),
+ (0x1F83, 'M', 'ἃι'),
+ (0x1F84, 'M', 'ἄι'),
+ (0x1F85, 'M', 'ἅι'),
+ (0x1F86, 'M', 'ἆι'),
+ (0x1F87, 'M', 'ἇι'),
+ ]
+
+def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1F88, 'M', 'ἀι'),
+ (0x1F89, 'M', 'ἁι'),
+ (0x1F8A, 'M', 'ἂι'),
+ (0x1F8B, 'M', 'ἃι'),
+ (0x1F8C, 'M', 'ἄι'),
+ (0x1F8D, 'M', 'ἅι'),
+ (0x1F8E, 'M', 'ἆι'),
+ (0x1F8F, 'M', 'ἇι'),
+ (0x1F90, 'M', 'ἠι'),
+ (0x1F91, 'M', 'ἡι'),
+ (0x1F92, 'M', 'ἢι'),
+ (0x1F93, 'M', 'ἣι'),
+ (0x1F94, 'M', 'ἤι'),
+ (0x1F95, 'M', 'ἥι'),
+ (0x1F96, 'M', 'ἦι'),
+ (0x1F97, 'M', 'ἧι'),
+ (0x1F98, 'M', 'ἠι'),
+ (0x1F99, 'M', 'ἡι'),
+ (0x1F9A, 'M', 'ἢι'),
+ (0x1F9B, 'M', 'ἣι'),
+ (0x1F9C, 'M', 'ἤι'),
+ (0x1F9D, 'M', 'ἥι'),
+ (0x1F9E, 'M', 'ἦι'),
+ (0x1F9F, 'M', 'ἧι'),
+ (0x1FA0, 'M', 'ὠι'),
+ (0x1FA1, 'M', 'ὡι'),
+ (0x1FA2, 'M', 'ὢι'),
+ (0x1FA3, 'M', 'ὣι'),
+ (0x1FA4, 'M', 'ὤι'),
+ (0x1FA5, 'M', 'ὥι'),
+ (0x1FA6, 'M', 'ὦι'),
+ (0x1FA7, 'M', 'ὧι'),
+ (0x1FA8, 'M', 'ὠι'),
+ (0x1FA9, 'M', 'ὡι'),
+ (0x1FAA, 'M', 'ὢι'),
+ (0x1FAB, 'M', 'ὣι'),
+ (0x1FAC, 'M', 'ὤι'),
+ (0x1FAD, 'M', 'ὥι'),
+ (0x1FAE, 'M', 'ὦι'),
+ (0x1FAF, 'M', 'ὧι'),
+ (0x1FB0, 'V'),
+ (0x1FB2, 'M', 'ὰι'),
+ (0x1FB3, 'M', 'αι'),
+ (0x1FB4, 'M', 'άι'),
+ (0x1FB5, 'X'),
+ (0x1FB6, 'V'),
+ (0x1FB7, 'M', 'ᾶι'),
+ (0x1FB8, 'M', 'ᾰ'),
+ (0x1FB9, 'M', 'ᾱ'),
+ (0x1FBA, 'M', 'ὰ'),
+ (0x1FBB, 'M', 'ά'),
+ (0x1FBC, 'M', 'αι'),
+ (0x1FBD, '3', ' ̓'),
+ (0x1FBE, 'M', 'ι'),
+ (0x1FBF, '3', ' ̓'),
+ (0x1FC0, '3', ' ͂'),
+ (0x1FC1, '3', ' ̈͂'),
+ (0x1FC2, 'M', 'ὴι'),
+ (0x1FC3, 'M', 'ηι'),
+ (0x1FC4, 'M', 'ήι'),
+ (0x1FC5, 'X'),
+ (0x1FC6, 'V'),
+ (0x1FC7, 'M', 'ῆι'),
+ (0x1FC8, 'M', 'ὲ'),
+ (0x1FC9, 'M', 'έ'),
+ (0x1FCA, 'M', 'ὴ'),
+ (0x1FCB, 'M', 'ή'),
+ (0x1FCC, 'M', 'ηι'),
+ (0x1FCD, '3', ' ̓̀'),
+ (0x1FCE, '3', ' ̓́'),
+ (0x1FCF, '3', ' ̓͂'),
+ (0x1FD0, 'V'),
+ (0x1FD3, 'M', 'ΐ'),
+ (0x1FD4, 'X'),
+ (0x1FD6, 'V'),
+ (0x1FD8, 'M', 'ῐ'),
+ (0x1FD9, 'M', 'ῑ'),
+ (0x1FDA, 'M', 'ὶ'),
+ (0x1FDB, 'M', 'ί'),
+ (0x1FDC, 'X'),
+ (0x1FDD, '3', ' ̔̀'),
+ (0x1FDE, '3', ' ̔́'),
+ (0x1FDF, '3', ' ̔͂'),
+ (0x1FE0, 'V'),
+ (0x1FE3, 'M', 'ΰ'),
+ (0x1FE4, 'V'),
+ (0x1FE8, 'M', 'ῠ'),
+ (0x1FE9, 'M', 'ῡ'),
+ (0x1FEA, 'M', 'ὺ'),
+ (0x1FEB, 'M', 'ύ'),
+ (0x1FEC, 'M', 'ῥ'),
+ (0x1FED, '3', ' ̈̀'),
+ (0x1FEE, '3', ' ̈́'),
+ (0x1FEF, '3', '`'),
+ (0x1FF0, 'X'),
+ (0x1FF2, 'M', 'ὼι'),
+ (0x1FF3, 'M', 'ωι'),
+ (0x1FF4, 'M', 'ώι'),
+ (0x1FF5, 'X'),
+ (0x1FF6, 'V'),
+ ]
+
+def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1FF7, 'M', 'ῶι'),
+ (0x1FF8, 'M', 'ὸ'),
+ (0x1FF9, 'M', 'ό'),
+ (0x1FFA, 'M', 'ὼ'),
+ (0x1FFB, 'M', 'ώ'),
+ (0x1FFC, 'M', 'ωι'),
+ (0x1FFD, '3', ' ́'),
+ (0x1FFE, '3', ' ̔'),
+ (0x1FFF, 'X'),
+ (0x2000, '3', ' '),
+ (0x200B, 'I'),
+ (0x200C, 'D', ''),
+ (0x200E, 'X'),
+ (0x2010, 'V'),
+ (0x2011, 'M', '‐'),
+ (0x2012, 'V'),
+ (0x2017, '3', ' ̳'),
+ (0x2018, 'V'),
+ (0x2024, 'X'),
+ (0x2027, 'V'),
+ (0x2028, 'X'),
+ (0x202F, '3', ' '),
+ (0x2030, 'V'),
+ (0x2033, 'M', '′′'),
+ (0x2034, 'M', '′′′'),
+ (0x2035, 'V'),
+ (0x2036, 'M', '‵‵'),
+ (0x2037, 'M', '‵‵‵'),
+ (0x2038, 'V'),
+ (0x203C, '3', '!!'),
+ (0x203D, 'V'),
+ (0x203E, '3', ' ̅'),
+ (0x203F, 'V'),
+ (0x2047, '3', '??'),
+ (0x2048, '3', '?!'),
+ (0x2049, '3', '!?'),
+ (0x204A, 'V'),
+ (0x2057, 'M', '′′′′'),
+ (0x2058, 'V'),
+ (0x205F, '3', ' '),
+ (0x2060, 'I'),
+ (0x2061, 'X'),
+ (0x2064, 'I'),
+ (0x2065, 'X'),
+ (0x2070, 'M', '0'),
+ (0x2071, 'M', 'i'),
+ (0x2072, 'X'),
+ (0x2074, 'M', '4'),
+ (0x2075, 'M', '5'),
+ (0x2076, 'M', '6'),
+ (0x2077, 'M', '7'),
+ (0x2078, 'M', '8'),
+ (0x2079, 'M', '9'),
+ (0x207A, '3', '+'),
+ (0x207B, 'M', '−'),
+ (0x207C, '3', '='),
+ (0x207D, '3', '('),
+ (0x207E, '3', ')'),
+ (0x207F, 'M', 'n'),
+ (0x2080, 'M', '0'),
+ (0x2081, 'M', '1'),
+ (0x2082, 'M', '2'),
+ (0x2083, 'M', '3'),
+ (0x2084, 'M', '4'),
+ (0x2085, 'M', '5'),
+ (0x2086, 'M', '6'),
+ (0x2087, 'M', '7'),
+ (0x2088, 'M', '8'),
+ (0x2089, 'M', '9'),
+ (0x208A, '3', '+'),
+ (0x208B, 'M', '−'),
+ (0x208C, '3', '='),
+ (0x208D, '3', '('),
+ (0x208E, '3', ')'),
+ (0x208F, 'X'),
+ (0x2090, 'M', 'a'),
+ (0x2091, 'M', 'e'),
+ (0x2092, 'M', 'o'),
+ (0x2093, 'M', 'x'),
+ (0x2094, 'M', 'ə'),
+ (0x2095, 'M', 'h'),
+ (0x2096, 'M', 'k'),
+ (0x2097, 'M', 'l'),
+ (0x2098, 'M', 'm'),
+ (0x2099, 'M', 'n'),
+ (0x209A, 'M', 'p'),
+ (0x209B, 'M', 's'),
+ (0x209C, 'M', 't'),
+ (0x209D, 'X'),
+ (0x20A0, 'V'),
+ (0x20A8, 'M', 'rs'),
+ (0x20A9, 'V'),
+ (0x20C1, 'X'),
+ (0x20D0, 'V'),
+ (0x20F1, 'X'),
+ (0x2100, '3', 'a/c'),
+ (0x2101, '3', 'a/s'),
+ (0x2102, 'M', 'c'),
+ (0x2103, 'M', '°c'),
+ (0x2104, 'V'),
+ ]
+
+def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2105, '3', 'c/o'),
+ (0x2106, '3', 'c/u'),
+ (0x2107, 'M', 'ɛ'),
+ (0x2108, 'V'),
+ (0x2109, 'M', '°f'),
+ (0x210A, 'M', 'g'),
+ (0x210B, 'M', 'h'),
+ (0x210F, 'M', 'ħ'),
+ (0x2110, 'M', 'i'),
+ (0x2112, 'M', 'l'),
+ (0x2114, 'V'),
+ (0x2115, 'M', 'n'),
+ (0x2116, 'M', 'no'),
+ (0x2117, 'V'),
+ (0x2119, 'M', 'p'),
+ (0x211A, 'M', 'q'),
+ (0x211B, 'M', 'r'),
+ (0x211E, 'V'),
+ (0x2120, 'M', 'sm'),
+ (0x2121, 'M', 'tel'),
+ (0x2122, 'M', 'tm'),
+ (0x2123, 'V'),
+ (0x2124, 'M', 'z'),
+ (0x2125, 'V'),
+ (0x2126, 'M', 'ω'),
+ (0x2127, 'V'),
+ (0x2128, 'M', 'z'),
+ (0x2129, 'V'),
+ (0x212A, 'M', 'k'),
+ (0x212B, 'M', 'å'),
+ (0x212C, 'M', 'b'),
+ (0x212D, 'M', 'c'),
+ (0x212E, 'V'),
+ (0x212F, 'M', 'e'),
+ (0x2131, 'M', 'f'),
+ (0x2132, 'X'),
+ (0x2133, 'M', 'm'),
+ (0x2134, 'M', 'o'),
+ (0x2135, 'M', 'א'),
+ (0x2136, 'M', 'ב'),
+ (0x2137, 'M', 'ג'),
+ (0x2138, 'M', 'ד'),
+ (0x2139, 'M', 'i'),
+ (0x213A, 'V'),
+ (0x213B, 'M', 'fax'),
+ (0x213C, 'M', 'π'),
+ (0x213D, 'M', 'γ'),
+ (0x213F, 'M', 'π'),
+ (0x2140, 'M', '∑'),
+ (0x2141, 'V'),
+ (0x2145, 'M', 'd'),
+ (0x2147, 'M', 'e'),
+ (0x2148, 'M', 'i'),
+ (0x2149, 'M', 'j'),
+ (0x214A, 'V'),
+ (0x2150, 'M', '1⁄7'),
+ (0x2151, 'M', '1⁄9'),
+ (0x2152, 'M', '1⁄10'),
+ (0x2153, 'M', '1⁄3'),
+ (0x2154, 'M', '2⁄3'),
+ (0x2155, 'M', '1⁄5'),
+ (0x2156, 'M', '2⁄5'),
+ (0x2157, 'M', '3⁄5'),
+ (0x2158, 'M', '4⁄5'),
+ (0x2159, 'M', '1⁄6'),
+ (0x215A, 'M', '5⁄6'),
+ (0x215B, 'M', '1⁄8'),
+ (0x215C, 'M', '3⁄8'),
+ (0x215D, 'M', '5⁄8'),
+ (0x215E, 'M', '7⁄8'),
+ (0x215F, 'M', '1⁄'),
+ (0x2160, 'M', 'i'),
+ (0x2161, 'M', 'ii'),
+ (0x2162, 'M', 'iii'),
+ (0x2163, 'M', 'iv'),
+ (0x2164, 'M', 'v'),
+ (0x2165, 'M', 'vi'),
+ (0x2166, 'M', 'vii'),
+ (0x2167, 'M', 'viii'),
+ (0x2168, 'M', 'ix'),
+ (0x2169, 'M', 'x'),
+ (0x216A, 'M', 'xi'),
+ (0x216B, 'M', 'xii'),
+ (0x216C, 'M', 'l'),
+ (0x216D, 'M', 'c'),
+ (0x216E, 'M', 'd'),
+ (0x216F, 'M', 'm'),
+ (0x2170, 'M', 'i'),
+ (0x2171, 'M', 'ii'),
+ (0x2172, 'M', 'iii'),
+ (0x2173, 'M', 'iv'),
+ (0x2174, 'M', 'v'),
+ (0x2175, 'M', 'vi'),
+ (0x2176, 'M', 'vii'),
+ (0x2177, 'M', 'viii'),
+ (0x2178, 'M', 'ix'),
+ (0x2179, 'M', 'x'),
+ (0x217A, 'M', 'xi'),
+ (0x217B, 'M', 'xii'),
+ (0x217C, 'M', 'l'),
+ ]
+
+def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x217D, 'M', 'c'),
+ (0x217E, 'M', 'd'),
+ (0x217F, 'M', 'm'),
+ (0x2180, 'V'),
+ (0x2183, 'X'),
+ (0x2184, 'V'),
+ (0x2189, 'M', '0⁄3'),
+ (0x218A, 'V'),
+ (0x218C, 'X'),
+ (0x2190, 'V'),
+ (0x222C, 'M', '∫∫'),
+ (0x222D, 'M', '∫∫∫'),
+ (0x222E, 'V'),
+ (0x222F, 'M', '∮∮'),
+ (0x2230, 'M', '∮∮∮'),
+ (0x2231, 'V'),
+ (0x2329, 'M', '〈'),
+ (0x232A, 'M', '〉'),
+ (0x232B, 'V'),
+ (0x2427, 'X'),
+ (0x2440, 'V'),
+ (0x244B, 'X'),
+ (0x2460, 'M', '1'),
+ (0x2461, 'M', '2'),
+ (0x2462, 'M', '3'),
+ (0x2463, 'M', '4'),
+ (0x2464, 'M', '5'),
+ (0x2465, 'M', '6'),
+ (0x2466, 'M', '7'),
+ (0x2467, 'M', '8'),
+ (0x2468, 'M', '9'),
+ (0x2469, 'M', '10'),
+ (0x246A, 'M', '11'),
+ (0x246B, 'M', '12'),
+ (0x246C, 'M', '13'),
+ (0x246D, 'M', '14'),
+ (0x246E, 'M', '15'),
+ (0x246F, 'M', '16'),
+ (0x2470, 'M', '17'),
+ (0x2471, 'M', '18'),
+ (0x2472, 'M', '19'),
+ (0x2473, 'M', '20'),
+ (0x2474, '3', '(1)'),
+ (0x2475, '3', '(2)'),
+ (0x2476, '3', '(3)'),
+ (0x2477, '3', '(4)'),
+ (0x2478, '3', '(5)'),
+ (0x2479, '3', '(6)'),
+ (0x247A, '3', '(7)'),
+ (0x247B, '3', '(8)'),
+ (0x247C, '3', '(9)'),
+ (0x247D, '3', '(10)'),
+ (0x247E, '3', '(11)'),
+ (0x247F, '3', '(12)'),
+ (0x2480, '3', '(13)'),
+ (0x2481, '3', '(14)'),
+ (0x2482, '3', '(15)'),
+ (0x2483, '3', '(16)'),
+ (0x2484, '3', '(17)'),
+ (0x2485, '3', '(18)'),
+ (0x2486, '3', '(19)'),
+ (0x2487, '3', '(20)'),
+ (0x2488, 'X'),
+ (0x249C, '3', '(a)'),
+ (0x249D, '3', '(b)'),
+ (0x249E, '3', '(c)'),
+ (0x249F, '3', '(d)'),
+ (0x24A0, '3', '(e)'),
+ (0x24A1, '3', '(f)'),
+ (0x24A2, '3', '(g)'),
+ (0x24A3, '3', '(h)'),
+ (0x24A4, '3', '(i)'),
+ (0x24A5, '3', '(j)'),
+ (0x24A6, '3', '(k)'),
+ (0x24A7, '3', '(l)'),
+ (0x24A8, '3', '(m)'),
+ (0x24A9, '3', '(n)'),
+ (0x24AA, '3', '(o)'),
+ (0x24AB, '3', '(p)'),
+ (0x24AC, '3', '(q)'),
+ (0x24AD, '3', '(r)'),
+ (0x24AE, '3', '(s)'),
+ (0x24AF, '3', '(t)'),
+ (0x24B0, '3', '(u)'),
+ (0x24B1, '3', '(v)'),
+ (0x24B2, '3', '(w)'),
+ (0x24B3, '3', '(x)'),
+ (0x24B4, '3', '(y)'),
+ (0x24B5, '3', '(z)'),
+ (0x24B6, 'M', 'a'),
+ (0x24B7, 'M', 'b'),
+ (0x24B8, 'M', 'c'),
+ (0x24B9, 'M', 'd'),
+ (0x24BA, 'M', 'e'),
+ (0x24BB, 'M', 'f'),
+ (0x24BC, 'M', 'g'),
+ (0x24BD, 'M', 'h'),
+ (0x24BE, 'M', 'i'),
+ (0x24BF, 'M', 'j'),
+ (0x24C0, 'M', 'k'),
+ ]
+
+def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x24C1, 'M', 'l'),
+ (0x24C2, 'M', 'm'),
+ (0x24C3, 'M', 'n'),
+ (0x24C4, 'M', 'o'),
+ (0x24C5, 'M', 'p'),
+ (0x24C6, 'M', 'q'),
+ (0x24C7, 'M', 'r'),
+ (0x24C8, 'M', 's'),
+ (0x24C9, 'M', 't'),
+ (0x24CA, 'M', 'u'),
+ (0x24CB, 'M', 'v'),
+ (0x24CC, 'M', 'w'),
+ (0x24CD, 'M', 'x'),
+ (0x24CE, 'M', 'y'),
+ (0x24CF, 'M', 'z'),
+ (0x24D0, 'M', 'a'),
+ (0x24D1, 'M', 'b'),
+ (0x24D2, 'M', 'c'),
+ (0x24D3, 'M', 'd'),
+ (0x24D4, 'M', 'e'),
+ (0x24D5, 'M', 'f'),
+ (0x24D6, 'M', 'g'),
+ (0x24D7, 'M', 'h'),
+ (0x24D8, 'M', 'i'),
+ (0x24D9, 'M', 'j'),
+ (0x24DA, 'M', 'k'),
+ (0x24DB, 'M', 'l'),
+ (0x24DC, 'M', 'm'),
+ (0x24DD, 'M', 'n'),
+ (0x24DE, 'M', 'o'),
+ (0x24DF, 'M', 'p'),
+ (0x24E0, 'M', 'q'),
+ (0x24E1, 'M', 'r'),
+ (0x24E2, 'M', 's'),
+ (0x24E3, 'M', 't'),
+ (0x24E4, 'M', 'u'),
+ (0x24E5, 'M', 'v'),
+ (0x24E6, 'M', 'w'),
+ (0x24E7, 'M', 'x'),
+ (0x24E8, 'M', 'y'),
+ (0x24E9, 'M', 'z'),
+ (0x24EA, 'M', '0'),
+ (0x24EB, 'V'),
+ (0x2A0C, 'M', '∫∫∫∫'),
+ (0x2A0D, 'V'),
+ (0x2A74, '3', '::='),
+ (0x2A75, '3', '=='),
+ (0x2A76, '3', '==='),
+ (0x2A77, 'V'),
+ (0x2ADC, 'M', '⫝̸'),
+ (0x2ADD, 'V'),
+ (0x2B74, 'X'),
+ (0x2B76, 'V'),
+ (0x2B96, 'X'),
+ (0x2B97, 'V'),
+ (0x2C00, 'M', 'ⰰ'),
+ (0x2C01, 'M', 'ⰱ'),
+ (0x2C02, 'M', 'ⰲ'),
+ (0x2C03, 'M', 'ⰳ'),
+ (0x2C04, 'M', 'ⰴ'),
+ (0x2C05, 'M', 'ⰵ'),
+ (0x2C06, 'M', 'ⰶ'),
+ (0x2C07, 'M', 'ⰷ'),
+ (0x2C08, 'M', 'ⰸ'),
+ (0x2C09, 'M', 'ⰹ'),
+ (0x2C0A, 'M', 'ⰺ'),
+ (0x2C0B, 'M', 'ⰻ'),
+ (0x2C0C, 'M', 'ⰼ'),
+ (0x2C0D, 'M', 'ⰽ'),
+ (0x2C0E, 'M', 'ⰾ'),
+ (0x2C0F, 'M', 'ⰿ'),
+ (0x2C10, 'M', 'ⱀ'),
+ (0x2C11, 'M', 'ⱁ'),
+ (0x2C12, 'M', 'ⱂ'),
+ (0x2C13, 'M', 'ⱃ'),
+ (0x2C14, 'M', 'ⱄ'),
+ (0x2C15, 'M', 'ⱅ'),
+ (0x2C16, 'M', 'ⱆ'),
+ (0x2C17, 'M', 'ⱇ'),
+ (0x2C18, 'M', 'ⱈ'),
+ (0x2C19, 'M', 'ⱉ'),
+ (0x2C1A, 'M', 'ⱊ'),
+ (0x2C1B, 'M', 'ⱋ'),
+ (0x2C1C, 'M', 'ⱌ'),
+ (0x2C1D, 'M', 'ⱍ'),
+ (0x2C1E, 'M', 'ⱎ'),
+ (0x2C1F, 'M', 'ⱏ'),
+ (0x2C20, 'M', 'ⱐ'),
+ (0x2C21, 'M', 'ⱑ'),
+ (0x2C22, 'M', 'ⱒ'),
+ (0x2C23, 'M', 'ⱓ'),
+ (0x2C24, 'M', 'ⱔ'),
+ (0x2C25, 'M', 'ⱕ'),
+ (0x2C26, 'M', 'ⱖ'),
+ (0x2C27, 'M', 'ⱗ'),
+ (0x2C28, 'M', 'ⱘ'),
+ (0x2C29, 'M', 'ⱙ'),
+ (0x2C2A, 'M', 'ⱚ'),
+ (0x2C2B, 'M', 'ⱛ'),
+ (0x2C2C, 'M', 'ⱜ'),
+ ]
+
+def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2C2D, 'M', 'ⱝ'),
+ (0x2C2E, 'M', 'ⱞ'),
+ (0x2C2F, 'M', 'ⱟ'),
+ (0x2C30, 'V'),
+ (0x2C60, 'M', 'ⱡ'),
+ (0x2C61, 'V'),
+ (0x2C62, 'M', 'ɫ'),
+ (0x2C63, 'M', 'ᵽ'),
+ (0x2C64, 'M', 'ɽ'),
+ (0x2C65, 'V'),
+ (0x2C67, 'M', 'ⱨ'),
+ (0x2C68, 'V'),
+ (0x2C69, 'M', 'ⱪ'),
+ (0x2C6A, 'V'),
+ (0x2C6B, 'M', 'ⱬ'),
+ (0x2C6C, 'V'),
+ (0x2C6D, 'M', 'ɑ'),
+ (0x2C6E, 'M', 'ɱ'),
+ (0x2C6F, 'M', 'ɐ'),
+ (0x2C70, 'M', 'ɒ'),
+ (0x2C71, 'V'),
+ (0x2C72, 'M', 'ⱳ'),
+ (0x2C73, 'V'),
+ (0x2C75, 'M', 'ⱶ'),
+ (0x2C76, 'V'),
+ (0x2C7C, 'M', 'j'),
+ (0x2C7D, 'M', 'v'),
+ (0x2C7E, 'M', 'ȿ'),
+ (0x2C7F, 'M', 'ɀ'),
+ (0x2C80, 'M', 'ⲁ'),
+ (0x2C81, 'V'),
+ (0x2C82, 'M', 'ⲃ'),
+ (0x2C83, 'V'),
+ (0x2C84, 'M', 'ⲅ'),
+ (0x2C85, 'V'),
+ (0x2C86, 'M', 'ⲇ'),
+ (0x2C87, 'V'),
+ (0x2C88, 'M', 'ⲉ'),
+ (0x2C89, 'V'),
+ (0x2C8A, 'M', 'ⲋ'),
+ (0x2C8B, 'V'),
+ (0x2C8C, 'M', 'ⲍ'),
+ (0x2C8D, 'V'),
+ (0x2C8E, 'M', 'ⲏ'),
+ (0x2C8F, 'V'),
+ (0x2C90, 'M', 'ⲑ'),
+ (0x2C91, 'V'),
+ (0x2C92, 'M', 'ⲓ'),
+ (0x2C93, 'V'),
+ (0x2C94, 'M', 'ⲕ'),
+ (0x2C95, 'V'),
+ (0x2C96, 'M', 'ⲗ'),
+ (0x2C97, 'V'),
+ (0x2C98, 'M', 'ⲙ'),
+ (0x2C99, 'V'),
+ (0x2C9A, 'M', 'ⲛ'),
+ (0x2C9B, 'V'),
+ (0x2C9C, 'M', 'ⲝ'),
+ (0x2C9D, 'V'),
+ (0x2C9E, 'M', 'ⲟ'),
+ (0x2C9F, 'V'),
+ (0x2CA0, 'M', 'ⲡ'),
+ (0x2CA1, 'V'),
+ (0x2CA2, 'M', 'ⲣ'),
+ (0x2CA3, 'V'),
+ (0x2CA4, 'M', 'ⲥ'),
+ (0x2CA5, 'V'),
+ (0x2CA6, 'M', 'ⲧ'),
+ (0x2CA7, 'V'),
+ (0x2CA8, 'M', 'ⲩ'),
+ (0x2CA9, 'V'),
+ (0x2CAA, 'M', 'ⲫ'),
+ (0x2CAB, 'V'),
+ (0x2CAC, 'M', 'ⲭ'),
+ (0x2CAD, 'V'),
+ (0x2CAE, 'M', 'ⲯ'),
+ (0x2CAF, 'V'),
+ (0x2CB0, 'M', 'ⲱ'),
+ (0x2CB1, 'V'),
+ (0x2CB2, 'M', 'ⲳ'),
+ (0x2CB3, 'V'),
+ (0x2CB4, 'M', 'ⲵ'),
+ (0x2CB5, 'V'),
+ (0x2CB6, 'M', 'ⲷ'),
+ (0x2CB7, 'V'),
+ (0x2CB8, 'M', 'ⲹ'),
+ (0x2CB9, 'V'),
+ (0x2CBA, 'M', 'ⲻ'),
+ (0x2CBB, 'V'),
+ (0x2CBC, 'M', 'ⲽ'),
+ (0x2CBD, 'V'),
+ (0x2CBE, 'M', 'ⲿ'),
+ (0x2CBF, 'V'),
+ (0x2CC0, 'M', 'ⳁ'),
+ (0x2CC1, 'V'),
+ (0x2CC2, 'M', 'ⳃ'),
+ (0x2CC3, 'V'),
+ (0x2CC4, 'M', 'ⳅ'),
+ (0x2CC5, 'V'),
+ (0x2CC6, 'M', 'ⳇ'),
+ ]
+
+def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2CC7, 'V'),
+ (0x2CC8, 'M', 'ⳉ'),
+ (0x2CC9, 'V'),
+ (0x2CCA, 'M', 'ⳋ'),
+ (0x2CCB, 'V'),
+ (0x2CCC, 'M', 'ⳍ'),
+ (0x2CCD, 'V'),
+ (0x2CCE, 'M', 'ⳏ'),
+ (0x2CCF, 'V'),
+ (0x2CD0, 'M', 'ⳑ'),
+ (0x2CD1, 'V'),
+ (0x2CD2, 'M', 'ⳓ'),
+ (0x2CD3, 'V'),
+ (0x2CD4, 'M', 'ⳕ'),
+ (0x2CD5, 'V'),
+ (0x2CD6, 'M', 'ⳗ'),
+ (0x2CD7, 'V'),
+ (0x2CD8, 'M', 'ⳙ'),
+ (0x2CD9, 'V'),
+ (0x2CDA, 'M', 'ⳛ'),
+ (0x2CDB, 'V'),
+ (0x2CDC, 'M', 'ⳝ'),
+ (0x2CDD, 'V'),
+ (0x2CDE, 'M', 'ⳟ'),
+ (0x2CDF, 'V'),
+ (0x2CE0, 'M', 'ⳡ'),
+ (0x2CE1, 'V'),
+ (0x2CE2, 'M', 'ⳣ'),
+ (0x2CE3, 'V'),
+ (0x2CEB, 'M', 'ⳬ'),
+ (0x2CEC, 'V'),
+ (0x2CED, 'M', 'ⳮ'),
+ (0x2CEE, 'V'),
+ (0x2CF2, 'M', 'ⳳ'),
+ (0x2CF3, 'V'),
+ (0x2CF4, 'X'),
+ (0x2CF9, 'V'),
+ (0x2D26, 'X'),
+ (0x2D27, 'V'),
+ (0x2D28, 'X'),
+ (0x2D2D, 'V'),
+ (0x2D2E, 'X'),
+ (0x2D30, 'V'),
+ (0x2D68, 'X'),
+ (0x2D6F, 'M', 'ⵡ'),
+ (0x2D70, 'V'),
+ (0x2D71, 'X'),
+ (0x2D7F, 'V'),
+ (0x2D97, 'X'),
+ (0x2DA0, 'V'),
+ (0x2DA7, 'X'),
+ (0x2DA8, 'V'),
+ (0x2DAF, 'X'),
+ (0x2DB0, 'V'),
+ (0x2DB7, 'X'),
+ (0x2DB8, 'V'),
+ (0x2DBF, 'X'),
+ (0x2DC0, 'V'),
+ (0x2DC7, 'X'),
+ (0x2DC8, 'V'),
+ (0x2DCF, 'X'),
+ (0x2DD0, 'V'),
+ (0x2DD7, 'X'),
+ (0x2DD8, 'V'),
+ (0x2DDF, 'X'),
+ (0x2DE0, 'V'),
+ (0x2E5E, 'X'),
+ (0x2E80, 'V'),
+ (0x2E9A, 'X'),
+ (0x2E9B, 'V'),
+ (0x2E9F, 'M', '母'),
+ (0x2EA0, 'V'),
+ (0x2EF3, 'M', '龟'),
+ (0x2EF4, 'X'),
+ (0x2F00, 'M', '一'),
+ (0x2F01, 'M', '丨'),
+ (0x2F02, 'M', '丶'),
+ (0x2F03, 'M', '丿'),
+ (0x2F04, 'M', '乙'),
+ (0x2F05, 'M', '亅'),
+ (0x2F06, 'M', '二'),
+ (0x2F07, 'M', '亠'),
+ (0x2F08, 'M', '人'),
+ (0x2F09, 'M', '儿'),
+ (0x2F0A, 'M', '入'),
+ (0x2F0B, 'M', '八'),
+ (0x2F0C, 'M', '冂'),
+ (0x2F0D, 'M', '冖'),
+ (0x2F0E, 'M', '冫'),
+ (0x2F0F, 'M', '几'),
+ (0x2F10, 'M', '凵'),
+ (0x2F11, 'M', '刀'),
+ (0x2F12, 'M', '力'),
+ (0x2F13, 'M', '勹'),
+ (0x2F14, 'M', '匕'),
+ (0x2F15, 'M', '匚'),
+ (0x2F16, 'M', '匸'),
+ (0x2F17, 'M', '十'),
+ (0x2F18, 'M', '卜'),
+ (0x2F19, 'M', '卩'),
+ ]
+
+def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F1A, 'M', '厂'),
+ (0x2F1B, 'M', '厶'),
+ (0x2F1C, 'M', '又'),
+ (0x2F1D, 'M', '口'),
+ (0x2F1E, 'M', '囗'),
+ (0x2F1F, 'M', '土'),
+ (0x2F20, 'M', '士'),
+ (0x2F21, 'M', '夂'),
+ (0x2F22, 'M', '夊'),
+ (0x2F23, 'M', '夕'),
+ (0x2F24, 'M', '大'),
+ (0x2F25, 'M', '女'),
+ (0x2F26, 'M', '子'),
+ (0x2F27, 'M', '宀'),
+ (0x2F28, 'M', '寸'),
+ (0x2F29, 'M', '小'),
+ (0x2F2A, 'M', '尢'),
+ (0x2F2B, 'M', '尸'),
+ (0x2F2C, 'M', '屮'),
+ (0x2F2D, 'M', '山'),
+ (0x2F2E, 'M', '巛'),
+ (0x2F2F, 'M', '工'),
+ (0x2F30, 'M', '己'),
+ (0x2F31, 'M', '巾'),
+ (0x2F32, 'M', '干'),
+ (0x2F33, 'M', '幺'),
+ (0x2F34, 'M', '广'),
+ (0x2F35, 'M', '廴'),
+ (0x2F36, 'M', '廾'),
+ (0x2F37, 'M', '弋'),
+ (0x2F38, 'M', '弓'),
+ (0x2F39, 'M', '彐'),
+ (0x2F3A, 'M', '彡'),
+ (0x2F3B, 'M', '彳'),
+ (0x2F3C, 'M', '心'),
+ (0x2F3D, 'M', '戈'),
+ (0x2F3E, 'M', '戶'),
+ (0x2F3F, 'M', '手'),
+ (0x2F40, 'M', '支'),
+ (0x2F41, 'M', '攴'),
+ (0x2F42, 'M', '文'),
+ (0x2F43, 'M', '斗'),
+ (0x2F44, 'M', '斤'),
+ (0x2F45, 'M', '方'),
+ (0x2F46, 'M', '无'),
+ (0x2F47, 'M', '日'),
+ (0x2F48, 'M', '曰'),
+ (0x2F49, 'M', '月'),
+ (0x2F4A, 'M', '木'),
+ (0x2F4B, 'M', '欠'),
+ (0x2F4C, 'M', '止'),
+ (0x2F4D, 'M', '歹'),
+ (0x2F4E, 'M', '殳'),
+ (0x2F4F, 'M', '毋'),
+ (0x2F50, 'M', '比'),
+ (0x2F51, 'M', '毛'),
+ (0x2F52, 'M', '氏'),
+ (0x2F53, 'M', '气'),
+ (0x2F54, 'M', '水'),
+ (0x2F55, 'M', '火'),
+ (0x2F56, 'M', '爪'),
+ (0x2F57, 'M', '父'),
+ (0x2F58, 'M', '爻'),
+ (0x2F59, 'M', '爿'),
+ (0x2F5A, 'M', '片'),
+ (0x2F5B, 'M', '牙'),
+ (0x2F5C, 'M', '牛'),
+ (0x2F5D, 'M', '犬'),
+ (0x2F5E, 'M', '玄'),
+ (0x2F5F, 'M', '玉'),
+ (0x2F60, 'M', '瓜'),
+ (0x2F61, 'M', '瓦'),
+ (0x2F62, 'M', '甘'),
+ (0x2F63, 'M', '生'),
+ (0x2F64, 'M', '用'),
+ (0x2F65, 'M', '田'),
+ (0x2F66, 'M', '疋'),
+ (0x2F67, 'M', '疒'),
+ (0x2F68, 'M', '癶'),
+ (0x2F69, 'M', '白'),
+ (0x2F6A, 'M', '皮'),
+ (0x2F6B, 'M', '皿'),
+ (0x2F6C, 'M', '目'),
+ (0x2F6D, 'M', '矛'),
+ (0x2F6E, 'M', '矢'),
+ (0x2F6F, 'M', '石'),
+ (0x2F70, 'M', '示'),
+ (0x2F71, 'M', '禸'),
+ (0x2F72, 'M', '禾'),
+ (0x2F73, 'M', '穴'),
+ (0x2F74, 'M', '立'),
+ (0x2F75, 'M', '竹'),
+ (0x2F76, 'M', '米'),
+ (0x2F77, 'M', '糸'),
+ (0x2F78, 'M', '缶'),
+ (0x2F79, 'M', '网'),
+ (0x2F7A, 'M', '羊'),
+ (0x2F7B, 'M', '羽'),
+ (0x2F7C, 'M', '老'),
+ (0x2F7D, 'M', '而'),
+ ]
+
+def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F7E, 'M', '耒'),
+ (0x2F7F, 'M', '耳'),
+ (0x2F80, 'M', '聿'),
+ (0x2F81, 'M', '肉'),
+ (0x2F82, 'M', '臣'),
+ (0x2F83, 'M', '自'),
+ (0x2F84, 'M', '至'),
+ (0x2F85, 'M', '臼'),
+ (0x2F86, 'M', '舌'),
+ (0x2F87, 'M', '舛'),
+ (0x2F88, 'M', '舟'),
+ (0x2F89, 'M', '艮'),
+ (0x2F8A, 'M', '色'),
+ (0x2F8B, 'M', '艸'),
+ (0x2F8C, 'M', '虍'),
+ (0x2F8D, 'M', '虫'),
+ (0x2F8E, 'M', '血'),
+ (0x2F8F, 'M', '行'),
+ (0x2F90, 'M', '衣'),
+ (0x2F91, 'M', '襾'),
+ (0x2F92, 'M', '見'),
+ (0x2F93, 'M', '角'),
+ (0x2F94, 'M', '言'),
+ (0x2F95, 'M', '谷'),
+ (0x2F96, 'M', '豆'),
+ (0x2F97, 'M', '豕'),
+ (0x2F98, 'M', '豸'),
+ (0x2F99, 'M', '貝'),
+ (0x2F9A, 'M', '赤'),
+ (0x2F9B, 'M', '走'),
+ (0x2F9C, 'M', '足'),
+ (0x2F9D, 'M', '身'),
+ (0x2F9E, 'M', '車'),
+ (0x2F9F, 'M', '辛'),
+ (0x2FA0, 'M', '辰'),
+ (0x2FA1, 'M', '辵'),
+ (0x2FA2, 'M', '邑'),
+ (0x2FA3, 'M', '酉'),
+ (0x2FA4, 'M', '釆'),
+ (0x2FA5, 'M', '里'),
+ (0x2FA6, 'M', '金'),
+ (0x2FA7, 'M', '長'),
+ (0x2FA8, 'M', '門'),
+ (0x2FA9, 'M', '阜'),
+ (0x2FAA, 'M', '隶'),
+ (0x2FAB, 'M', '隹'),
+ (0x2FAC, 'M', '雨'),
+ (0x2FAD, 'M', '靑'),
+ (0x2FAE, 'M', '非'),
+ (0x2FAF, 'M', '面'),
+ (0x2FB0, 'M', '革'),
+ (0x2FB1, 'M', '韋'),
+ (0x2FB2, 'M', '韭'),
+ (0x2FB3, 'M', '音'),
+ (0x2FB4, 'M', '頁'),
+ (0x2FB5, 'M', '風'),
+ (0x2FB6, 'M', '飛'),
+ (0x2FB7, 'M', '食'),
+ (0x2FB8, 'M', '首'),
+ (0x2FB9, 'M', '香'),
+ (0x2FBA, 'M', '馬'),
+ (0x2FBB, 'M', '骨'),
+ (0x2FBC, 'M', '高'),
+ (0x2FBD, 'M', '髟'),
+ (0x2FBE, 'M', '鬥'),
+ (0x2FBF, 'M', '鬯'),
+ (0x2FC0, 'M', '鬲'),
+ (0x2FC1, 'M', '鬼'),
+ (0x2FC2, 'M', '魚'),
+ (0x2FC3, 'M', '鳥'),
+ (0x2FC4, 'M', '鹵'),
+ (0x2FC5, 'M', '鹿'),
+ (0x2FC6, 'M', '麥'),
+ (0x2FC7, 'M', '麻'),
+ (0x2FC8, 'M', '黃'),
+ (0x2FC9, 'M', '黍'),
+ (0x2FCA, 'M', '黑'),
+ (0x2FCB, 'M', '黹'),
+ (0x2FCC, 'M', '黽'),
+ (0x2FCD, 'M', '鼎'),
+ (0x2FCE, 'M', '鼓'),
+ (0x2FCF, 'M', '鼠'),
+ (0x2FD0, 'M', '鼻'),
+ (0x2FD1, 'M', '齊'),
+ (0x2FD2, 'M', '齒'),
+ (0x2FD3, 'M', '龍'),
+ (0x2FD4, 'M', '龜'),
+ (0x2FD5, 'M', '龠'),
+ (0x2FD6, 'X'),
+ (0x3000, '3', ' '),
+ (0x3001, 'V'),
+ (0x3002, 'M', '.'),
+ (0x3003, 'V'),
+ (0x3036, 'M', '〒'),
+ (0x3037, 'V'),
+ (0x3038, 'M', '十'),
+ (0x3039, 'M', '卄'),
+ (0x303A, 'M', '卅'),
+ (0x303B, 'V'),
+ (0x3040, 'X'),
+ ]
+
+def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x3041, 'V'),
+ (0x3097, 'X'),
+ (0x3099, 'V'),
+ (0x309B, '3', ' ゙'),
+ (0x309C, '3', ' ゚'),
+ (0x309D, 'V'),
+ (0x309F, 'M', 'より'),
+ (0x30A0, 'V'),
+ (0x30FF, 'M', 'コト'),
+ (0x3100, 'X'),
+ (0x3105, 'V'),
+ (0x3130, 'X'),
+ (0x3131, 'M', 'ᄀ'),
+ (0x3132, 'M', 'ᄁ'),
+ (0x3133, 'M', 'ᆪ'),
+ (0x3134, 'M', 'ᄂ'),
+ (0x3135, 'M', 'ᆬ'),
+ (0x3136, 'M', 'ᆭ'),
+ (0x3137, 'M', 'ᄃ'),
+ (0x3138, 'M', 'ᄄ'),
+ (0x3139, 'M', 'ᄅ'),
+ (0x313A, 'M', 'ᆰ'),
+ (0x313B, 'M', 'ᆱ'),
+ (0x313C, 'M', 'ᆲ'),
+ (0x313D, 'M', 'ᆳ'),
+ (0x313E, 'M', 'ᆴ'),
+ (0x313F, 'M', 'ᆵ'),
+ (0x3140, 'M', 'ᄚ'),
+ (0x3141, 'M', 'ᄆ'),
+ (0x3142, 'M', 'ᄇ'),
+ (0x3143, 'M', 'ᄈ'),
+ (0x3144, 'M', 'ᄡ'),
+ (0x3145, 'M', 'ᄉ'),
+ (0x3146, 'M', 'ᄊ'),
+ (0x3147, 'M', 'ᄋ'),
+ (0x3148, 'M', 'ᄌ'),
+ (0x3149, 'M', 'ᄍ'),
+ (0x314A, 'M', 'ᄎ'),
+ (0x314B, 'M', 'ᄏ'),
+ (0x314C, 'M', 'ᄐ'),
+ (0x314D, 'M', 'ᄑ'),
+ (0x314E, 'M', 'ᄒ'),
+ (0x314F, 'M', 'ᅡ'),
+ (0x3150, 'M', 'ᅢ'),
+ (0x3151, 'M', 'ᅣ'),
+ (0x3152, 'M', 'ᅤ'),
+ (0x3153, 'M', 'ᅥ'),
+ (0x3154, 'M', 'ᅦ'),
+ (0x3155, 'M', 'ᅧ'),
+ (0x3156, 'M', 'ᅨ'),
+ (0x3157, 'M', 'ᅩ'),
+ (0x3158, 'M', 'ᅪ'),
+ (0x3159, 'M', 'ᅫ'),
+ (0x315A, 'M', 'ᅬ'),
+ (0x315B, 'M', 'ᅭ'),
+ (0x315C, 'M', 'ᅮ'),
+ (0x315D, 'M', 'ᅯ'),
+ (0x315E, 'M', 'ᅰ'),
+ (0x315F, 'M', 'ᅱ'),
+ (0x3160, 'M', 'ᅲ'),
+ (0x3161, 'M', 'ᅳ'),
+ (0x3162, 'M', 'ᅴ'),
+ (0x3163, 'M', 'ᅵ'),
+ (0x3164, 'X'),
+ (0x3165, 'M', 'ᄔ'),
+ (0x3166, 'M', 'ᄕ'),
+ (0x3167, 'M', 'ᇇ'),
+ (0x3168, 'M', 'ᇈ'),
+ (0x3169, 'M', 'ᇌ'),
+ (0x316A, 'M', 'ᇎ'),
+ (0x316B, 'M', 'ᇓ'),
+ (0x316C, 'M', 'ᇗ'),
+ (0x316D, 'M', 'ᇙ'),
+ (0x316E, 'M', 'ᄜ'),
+ (0x316F, 'M', 'ᇝ'),
+ (0x3170, 'M', 'ᇟ'),
+ (0x3171, 'M', 'ᄝ'),
+ (0x3172, 'M', 'ᄞ'),
+ (0x3173, 'M', 'ᄠ'),
+ (0x3174, 'M', 'ᄢ'),
+ (0x3175, 'M', 'ᄣ'),
+ (0x3176, 'M', 'ᄧ'),
+ (0x3177, 'M', 'ᄩ'),
+ (0x3178, 'M', 'ᄫ'),
+ (0x3179, 'M', 'ᄬ'),
+ (0x317A, 'M', 'ᄭ'),
+ (0x317B, 'M', 'ᄮ'),
+ (0x317C, 'M', 'ᄯ'),
+ (0x317D, 'M', 'ᄲ'),
+ (0x317E, 'M', 'ᄶ'),
+ (0x317F, 'M', 'ᅀ'),
+ (0x3180, 'M', 'ᅇ'),
+ (0x3181, 'M', 'ᅌ'),
+ (0x3182, 'M', 'ᇱ'),
+ (0x3183, 'M', 'ᇲ'),
+ (0x3184, 'M', 'ᅗ'),
+ (0x3185, 'M', 'ᅘ'),
+ (0x3186, 'M', 'ᅙ'),
+ (0x3187, 'M', 'ᆄ'),
+ (0x3188, 'M', 'ᆅ'),
+ ]
+
+def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x3189, 'M', 'ᆈ'),
+ (0x318A, 'M', 'ᆑ'),
+ (0x318B, 'M', 'ᆒ'),
+ (0x318C, 'M', 'ᆔ'),
+ (0x318D, 'M', 'ᆞ'),
+ (0x318E, 'M', 'ᆡ'),
+ (0x318F, 'X'),
+ (0x3190, 'V'),
+ (0x3192, 'M', '一'),
+ (0x3193, 'M', '二'),
+ (0x3194, 'M', '三'),
+ (0x3195, 'M', '四'),
+ (0x3196, 'M', '上'),
+ (0x3197, 'M', '中'),
+ (0x3198, 'M', '下'),
+ (0x3199, 'M', '甲'),
+ (0x319A, 'M', '乙'),
+ (0x319B, 'M', '丙'),
+ (0x319C, 'M', '丁'),
+ (0x319D, 'M', '天'),
+ (0x319E, 'M', '地'),
+ (0x319F, 'M', '人'),
+ (0x31A0, 'V'),
+ (0x31E4, 'X'),
+ (0x31F0, 'V'),
+ (0x3200, '3', '(ᄀ)'),
+ (0x3201, '3', '(ᄂ)'),
+ (0x3202, '3', '(ᄃ)'),
+ (0x3203, '3', '(ᄅ)'),
+ (0x3204, '3', '(ᄆ)'),
+ (0x3205, '3', '(ᄇ)'),
+ (0x3206, '3', '(ᄉ)'),
+ (0x3207, '3', '(ᄋ)'),
+ (0x3208, '3', '(ᄌ)'),
+ (0x3209, '3', '(ᄎ)'),
+ (0x320A, '3', '(ᄏ)'),
+ (0x320B, '3', '(ᄐ)'),
+ (0x320C, '3', '(ᄑ)'),
+ (0x320D, '3', '(ᄒ)'),
+ (0x320E, '3', '(가)'),
+ (0x320F, '3', '(나)'),
+ (0x3210, '3', '(다)'),
+ (0x3211, '3', '(라)'),
+ (0x3212, '3', '(마)'),
+ (0x3213, '3', '(바)'),
+ (0x3214, '3', '(사)'),
+ (0x3215, '3', '(아)'),
+ (0x3216, '3', '(자)'),
+ (0x3217, '3', '(차)'),
+ (0x3218, '3', '(카)'),
+ (0x3219, '3', '(타)'),
+ (0x321A, '3', '(파)'),
+ (0x321B, '3', '(하)'),
+ (0x321C, '3', '(주)'),
+ (0x321D, '3', '(오전)'),
+ (0x321E, '3', '(오후)'),
+ (0x321F, 'X'),
+ (0x3220, '3', '(一)'),
+ (0x3221, '3', '(二)'),
+ (0x3222, '3', '(三)'),
+ (0x3223, '3', '(四)'),
+ (0x3224, '3', '(五)'),
+ (0x3225, '3', '(六)'),
+ (0x3226, '3', '(七)'),
+ (0x3227, '3', '(八)'),
+ (0x3228, '3', '(九)'),
+ (0x3229, '3', '(十)'),
+ (0x322A, '3', '(月)'),
+ (0x322B, '3', '(火)'),
+ (0x322C, '3', '(水)'),
+ (0x322D, '3', '(木)'),
+ (0x322E, '3', '(金)'),
+ (0x322F, '3', '(土)'),
+ (0x3230, '3', '(日)'),
+ (0x3231, '3', '(株)'),
+ (0x3232, '3', '(有)'),
+ (0x3233, '3', '(社)'),
+ (0x3234, '3', '(名)'),
+ (0x3235, '3', '(特)'),
+ (0x3236, '3', '(財)'),
+ (0x3237, '3', '(祝)'),
+ (0x3238, '3', '(労)'),
+ (0x3239, '3', '(代)'),
+ (0x323A, '3', '(呼)'),
+ (0x323B, '3', '(学)'),
+ (0x323C, '3', '(監)'),
+ (0x323D, '3', '(企)'),
+ (0x323E, '3', '(資)'),
+ (0x323F, '3', '(協)'),
+ (0x3240, '3', '(祭)'),
+ (0x3241, '3', '(休)'),
+ (0x3242, '3', '(自)'),
+ (0x3243, '3', '(至)'),
+ (0x3244, 'M', '問'),
+ (0x3245, 'M', '幼'),
+ (0x3246, 'M', '文'),
+ (0x3247, 'M', '箏'),
+ (0x3248, 'V'),
+ (0x3250, 'M', 'pte'),
+ (0x3251, 'M', '21'),
+ ]
+
+def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x3252, 'M', '22'),
+ (0x3253, 'M', '23'),
+ (0x3254, 'M', '24'),
+ (0x3255, 'M', '25'),
+ (0x3256, 'M', '26'),
+ (0x3257, 'M', '27'),
+ (0x3258, 'M', '28'),
+ (0x3259, 'M', '29'),
+ (0x325A, 'M', '30'),
+ (0x325B, 'M', '31'),
+ (0x325C, 'M', '32'),
+ (0x325D, 'M', '33'),
+ (0x325E, 'M', '34'),
+ (0x325F, 'M', '35'),
+ (0x3260, 'M', 'ᄀ'),
+ (0x3261, 'M', 'ᄂ'),
+ (0x3262, 'M', 'ᄃ'),
+ (0x3263, 'M', 'ᄅ'),
+ (0x3264, 'M', 'ᄆ'),
+ (0x3265, 'M', 'ᄇ'),
+ (0x3266, 'M', 'ᄉ'),
+ (0x3267, 'M', 'ᄋ'),
+ (0x3268, 'M', 'ᄌ'),
+ (0x3269, 'M', 'ᄎ'),
+ (0x326A, 'M', 'ᄏ'),
+ (0x326B, 'M', 'ᄐ'),
+ (0x326C, 'M', 'ᄑ'),
+ (0x326D, 'M', 'ᄒ'),
+ (0x326E, 'M', '가'),
+ (0x326F, 'M', '나'),
+ (0x3270, 'M', '다'),
+ (0x3271, 'M', '라'),
+ (0x3272, 'M', '마'),
+ (0x3273, 'M', '바'),
+ (0x3274, 'M', '사'),
+ (0x3275, 'M', '아'),
+ (0x3276, 'M', '자'),
+ (0x3277, 'M', '차'),
+ (0x3278, 'M', '카'),
+ (0x3279, 'M', '타'),
+ (0x327A, 'M', '파'),
+ (0x327B, 'M', '하'),
+ (0x327C, 'M', '참고'),
+ (0x327D, 'M', '주의'),
+ (0x327E, 'M', '우'),
+ (0x327F, 'V'),
+ (0x3280, 'M', '一'),
+ (0x3281, 'M', '二'),
+ (0x3282, 'M', '三'),
+ (0x3283, 'M', '四'),
+ (0x3284, 'M', '五'),
+ (0x3285, 'M', '六'),
+ (0x3286, 'M', '七'),
+ (0x3287, 'M', '八'),
+ (0x3288, 'M', '九'),
+ (0x3289, 'M', '十'),
+ (0x328A, 'M', '月'),
+ (0x328B, 'M', '火'),
+ (0x328C, 'M', '水'),
+ (0x328D, 'M', '木'),
+ (0x328E, 'M', '金'),
+ (0x328F, 'M', '土'),
+ (0x3290, 'M', '日'),
+ (0x3291, 'M', '株'),
+ (0x3292, 'M', '有'),
+ (0x3293, 'M', '社'),
+ (0x3294, 'M', '名'),
+ (0x3295, 'M', '特'),
+ (0x3296, 'M', '財'),
+ (0x3297, 'M', '祝'),
+ (0x3298, 'M', '労'),
+ (0x3299, 'M', '秘'),
+ (0x329A, 'M', '男'),
+ (0x329B, 'M', '女'),
+ (0x329C, 'M', '適'),
+ (0x329D, 'M', '優'),
+ (0x329E, 'M', '印'),
+ (0x329F, 'M', '注'),
+ (0x32A0, 'M', '項'),
+ (0x32A1, 'M', '休'),
+ (0x32A2, 'M', '写'),
+ (0x32A3, 'M', '正'),
+ (0x32A4, 'M', '上'),
+ (0x32A5, 'M', '中'),
+ (0x32A6, 'M', '下'),
+ (0x32A7, 'M', '左'),
+ (0x32A8, 'M', '右'),
+ (0x32A9, 'M', '医'),
+ (0x32AA, 'M', '宗'),
+ (0x32AB, 'M', '学'),
+ (0x32AC, 'M', '監'),
+ (0x32AD, 'M', '企'),
+ (0x32AE, 'M', '資'),
+ (0x32AF, 'M', '協'),
+ (0x32B0, 'M', '夜'),
+ (0x32B1, 'M', '36'),
+ (0x32B2, 'M', '37'),
+ (0x32B3, 'M', '38'),
+ (0x32B4, 'M', '39'),
+ (0x32B5, 'M', '40'),
+ ]
+
+def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x32B6, 'M', '41'),
+ (0x32B7, 'M', '42'),
+ (0x32B8, 'M', '43'),
+ (0x32B9, 'M', '44'),
+ (0x32BA, 'M', '45'),
+ (0x32BB, 'M', '46'),
+ (0x32BC, 'M', '47'),
+ (0x32BD, 'M', '48'),
+ (0x32BE, 'M', '49'),
+ (0x32BF, 'M', '50'),
+ (0x32C0, 'M', '1月'),
+ (0x32C1, 'M', '2月'),
+ (0x32C2, 'M', '3月'),
+ (0x32C3, 'M', '4月'),
+ (0x32C4, 'M', '5月'),
+ (0x32C5, 'M', '6月'),
+ (0x32C6, 'M', '7月'),
+ (0x32C7, 'M', '8月'),
+ (0x32C8, 'M', '9月'),
+ (0x32C9, 'M', '10月'),
+ (0x32CA, 'M', '11月'),
+ (0x32CB, 'M', '12月'),
+ (0x32CC, 'M', 'hg'),
+ (0x32CD, 'M', 'erg'),
+ (0x32CE, 'M', 'ev'),
+ (0x32CF, 'M', 'ltd'),
+ (0x32D0, 'M', 'ア'),
+ (0x32D1, 'M', 'イ'),
+ (0x32D2, 'M', 'ウ'),
+ (0x32D3, 'M', 'エ'),
+ (0x32D4, 'M', 'オ'),
+ (0x32D5, 'M', 'カ'),
+ (0x32D6, 'M', 'キ'),
+ (0x32D7, 'M', 'ク'),
+ (0x32D8, 'M', 'ケ'),
+ (0x32D9, 'M', 'コ'),
+ (0x32DA, 'M', 'サ'),
+ (0x32DB, 'M', 'シ'),
+ (0x32DC, 'M', 'ス'),
+ (0x32DD, 'M', 'セ'),
+ (0x32DE, 'M', 'ソ'),
+ (0x32DF, 'M', 'タ'),
+ (0x32E0, 'M', 'チ'),
+ (0x32E1, 'M', 'ツ'),
+ (0x32E2, 'M', 'テ'),
+ (0x32E3, 'M', 'ト'),
+ (0x32E4, 'M', 'ナ'),
+ (0x32E5, 'M', 'ニ'),
+ (0x32E6, 'M', 'ヌ'),
+ (0x32E7, 'M', 'ネ'),
+ (0x32E8, 'M', 'ノ'),
+ (0x32E9, 'M', 'ハ'),
+ (0x32EA, 'M', 'ヒ'),
+ (0x32EB, 'M', 'フ'),
+ (0x32EC, 'M', 'ヘ'),
+ (0x32ED, 'M', 'ホ'),
+ (0x32EE, 'M', 'マ'),
+ (0x32EF, 'M', 'ミ'),
+ (0x32F0, 'M', 'ム'),
+ (0x32F1, 'M', 'メ'),
+ (0x32F2, 'M', 'モ'),
+ (0x32F3, 'M', 'ヤ'),
+ (0x32F4, 'M', 'ユ'),
+ (0x32F5, 'M', 'ヨ'),
+ (0x32F6, 'M', 'ラ'),
+ (0x32F7, 'M', 'リ'),
+ (0x32F8, 'M', 'ル'),
+ (0x32F9, 'M', 'レ'),
+ (0x32FA, 'M', 'ロ'),
+ (0x32FB, 'M', 'ワ'),
+ (0x32FC, 'M', 'ヰ'),
+ (0x32FD, 'M', 'ヱ'),
+ (0x32FE, 'M', 'ヲ'),
+ (0x32FF, 'M', '令和'),
+ (0x3300, 'M', 'アパート'),
+ (0x3301, 'M', 'アルファ'),
+ (0x3302, 'M', 'アンペア'),
+ (0x3303, 'M', 'アール'),
+ (0x3304, 'M', 'イニング'),
+ (0x3305, 'M', 'インチ'),
+ (0x3306, 'M', 'ウォン'),
+ (0x3307, 'M', 'エスクード'),
+ (0x3308, 'M', 'エーカー'),
+ (0x3309, 'M', 'オンス'),
+ (0x330A, 'M', 'オーム'),
+ (0x330B, 'M', 'カイリ'),
+ (0x330C, 'M', 'カラット'),
+ (0x330D, 'M', 'カロリー'),
+ (0x330E, 'M', 'ガロン'),
+ (0x330F, 'M', 'ガンマ'),
+ (0x3310, 'M', 'ギガ'),
+ (0x3311, 'M', 'ギニー'),
+ (0x3312, 'M', 'キュリー'),
+ (0x3313, 'M', 'ギルダー'),
+ (0x3314, 'M', 'キロ'),
+ (0x3315, 'M', 'キログラム'),
+ (0x3316, 'M', 'キロメートル'),
+ (0x3317, 'M', 'キロワット'),
+ (0x3318, 'M', 'グラム'),
+ (0x3319, 'M', 'グラムトン'),
+ ]
+
+def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x331A, 'M', 'クルゼイロ'),
+ (0x331B, 'M', 'クローネ'),
+ (0x331C, 'M', 'ケース'),
+ (0x331D, 'M', 'コルナ'),
+ (0x331E, 'M', 'コーポ'),
+ (0x331F, 'M', 'サイクル'),
+ (0x3320, 'M', 'サンチーム'),
+ (0x3321, 'M', 'シリング'),
+ (0x3322, 'M', 'センチ'),
+ (0x3323, 'M', 'セント'),
+ (0x3324, 'M', 'ダース'),
+ (0x3325, 'M', 'デシ'),
+ (0x3326, 'M', 'ドル'),
+ (0x3327, 'M', 'トン'),
+ (0x3328, 'M', 'ナノ'),
+ (0x3329, 'M', 'ノット'),
+ (0x332A, 'M', 'ハイツ'),
+ (0x332B, 'M', 'パーセント'),
+ (0x332C, 'M', 'パーツ'),
+ (0x332D, 'M', 'バーレル'),
+ (0x332E, 'M', 'ピアストル'),
+ (0x332F, 'M', 'ピクル'),
+ (0x3330, 'M', 'ピコ'),
+ (0x3331, 'M', 'ビル'),
+ (0x3332, 'M', 'ファラッド'),
+ (0x3333, 'M', 'フィート'),
+ (0x3334, 'M', 'ブッシェル'),
+ (0x3335, 'M', 'フラン'),
+ (0x3336, 'M', 'ヘクタール'),
+ (0x3337, 'M', 'ペソ'),
+ (0x3338, 'M', 'ペニヒ'),
+ (0x3339, 'M', 'ヘルツ'),
+ (0x333A, 'M', 'ペンス'),
+ (0x333B, 'M', 'ページ'),
+ (0x333C, 'M', 'ベータ'),
+ (0x333D, 'M', 'ポイント'),
+ (0x333E, 'M', 'ボルト'),
+ (0x333F, 'M', 'ホン'),
+ (0x3340, 'M', 'ポンド'),
+ (0x3341, 'M', 'ホール'),
+ (0x3342, 'M', 'ホーン'),
+ (0x3343, 'M', 'マイクロ'),
+ (0x3344, 'M', 'マイル'),
+ (0x3345, 'M', 'マッハ'),
+ (0x3346, 'M', 'マルク'),
+ (0x3347, 'M', 'マンション'),
+ (0x3348, 'M', 'ミクロン'),
+ (0x3349, 'M', 'ミリ'),
+ (0x334A, 'M', 'ミリバール'),
+ (0x334B, 'M', 'メガ'),
+ (0x334C, 'M', 'メガトン'),
+ (0x334D, 'M', 'メートル'),
+ (0x334E, 'M', 'ヤード'),
+ (0x334F, 'M', 'ヤール'),
+ (0x3350, 'M', 'ユアン'),
+ (0x3351, 'M', 'リットル'),
+ (0x3352, 'M', 'リラ'),
+ (0x3353, 'M', 'ルピー'),
+ (0x3354, 'M', 'ルーブル'),
+ (0x3355, 'M', 'レム'),
+ (0x3356, 'M', 'レントゲン'),
+ (0x3357, 'M', 'ワット'),
+ (0x3358, 'M', '0点'),
+ (0x3359, 'M', '1点'),
+ (0x335A, 'M', '2点'),
+ (0x335B, 'M', '3点'),
+ (0x335C, 'M', '4点'),
+ (0x335D, 'M', '5点'),
+ (0x335E, 'M', '6点'),
+ (0x335F, 'M', '7点'),
+ (0x3360, 'M', '8点'),
+ (0x3361, 'M', '9点'),
+ (0x3362, 'M', '10点'),
+ (0x3363, 'M', '11点'),
+ (0x3364, 'M', '12点'),
+ (0x3365, 'M', '13点'),
+ (0x3366, 'M', '14点'),
+ (0x3367, 'M', '15点'),
+ (0x3368, 'M', '16点'),
+ (0x3369, 'M', '17点'),
+ (0x336A, 'M', '18点'),
+ (0x336B, 'M', '19点'),
+ (0x336C, 'M', '20点'),
+ (0x336D, 'M', '21点'),
+ (0x336E, 'M', '22点'),
+ (0x336F, 'M', '23点'),
+ (0x3370, 'M', '24点'),
+ (0x3371, 'M', 'hpa'),
+ (0x3372, 'M', 'da'),
+ (0x3373, 'M', 'au'),
+ (0x3374, 'M', 'bar'),
+ (0x3375, 'M', 'ov'),
+ (0x3376, 'M', 'pc'),
+ (0x3377, 'M', 'dm'),
+ (0x3378, 'M', 'dm2'),
+ (0x3379, 'M', 'dm3'),
+ (0x337A, 'M', 'iu'),
+ (0x337B, 'M', '平成'),
+ (0x337C, 'M', '昭和'),
+ (0x337D, 'M', '大正'),
+ ]
+
+def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x337E, 'M', '明治'),
+ (0x337F, 'M', '株式会社'),
+ (0x3380, 'M', 'pa'),
+ (0x3381, 'M', 'na'),
+ (0x3382, 'M', 'μa'),
+ (0x3383, 'M', 'ma'),
+ (0x3384, 'M', 'ka'),
+ (0x3385, 'M', 'kb'),
+ (0x3386, 'M', 'mb'),
+ (0x3387, 'M', 'gb'),
+ (0x3388, 'M', 'cal'),
+ (0x3389, 'M', 'kcal'),
+ (0x338A, 'M', 'pf'),
+ (0x338B, 'M', 'nf'),
+ (0x338C, 'M', 'μf'),
+ (0x338D, 'M', 'μg'),
+ (0x338E, 'M', 'mg'),
+ (0x338F, 'M', 'kg'),
+ (0x3390, 'M', 'hz'),
+ (0x3391, 'M', 'khz'),
+ (0x3392, 'M', 'mhz'),
+ (0x3393, 'M', 'ghz'),
+ (0x3394, 'M', 'thz'),
+ (0x3395, 'M', 'μl'),
+ (0x3396, 'M', 'ml'),
+ (0x3397, 'M', 'dl'),
+ (0x3398, 'M', 'kl'),
+ (0x3399, 'M', 'fm'),
+ (0x339A, 'M', 'nm'),
+ (0x339B, 'M', 'μm'),
+ (0x339C, 'M', 'mm'),
+ (0x339D, 'M', 'cm'),
+ (0x339E, 'M', 'km'),
+ (0x339F, 'M', 'mm2'),
+ (0x33A0, 'M', 'cm2'),
+ (0x33A1, 'M', 'm2'),
+ (0x33A2, 'M', 'km2'),
+ (0x33A3, 'M', 'mm3'),
+ (0x33A4, 'M', 'cm3'),
+ (0x33A5, 'M', 'm3'),
+ (0x33A6, 'M', 'km3'),
+ (0x33A7, 'M', 'm∕s'),
+ (0x33A8, 'M', 'm∕s2'),
+ (0x33A9, 'M', 'pa'),
+ (0x33AA, 'M', 'kpa'),
+ (0x33AB, 'M', 'mpa'),
+ (0x33AC, 'M', 'gpa'),
+ (0x33AD, 'M', 'rad'),
+ (0x33AE, 'M', 'rad∕s'),
+ (0x33AF, 'M', 'rad∕s2'),
+ (0x33B0, 'M', 'ps'),
+ (0x33B1, 'M', 'ns'),
+ (0x33B2, 'M', 'μs'),
+ (0x33B3, 'M', 'ms'),
+ (0x33B4, 'M', 'pv'),
+ (0x33B5, 'M', 'nv'),
+ (0x33B6, 'M', 'μv'),
+ (0x33B7, 'M', 'mv'),
+ (0x33B8, 'M', 'kv'),
+ (0x33B9, 'M', 'mv'),
+ (0x33BA, 'M', 'pw'),
+ (0x33BB, 'M', 'nw'),
+ (0x33BC, 'M', 'μw'),
+ (0x33BD, 'M', 'mw'),
+ (0x33BE, 'M', 'kw'),
+ (0x33BF, 'M', 'mw'),
+ (0x33C0, 'M', 'kω'),
+ (0x33C1, 'M', 'mω'),
+ (0x33C2, 'X'),
+ (0x33C3, 'M', 'bq'),
+ (0x33C4, 'M', 'cc'),
+ (0x33C5, 'M', 'cd'),
+ (0x33C6, 'M', 'c∕kg'),
+ (0x33C7, 'X'),
+ (0x33C8, 'M', 'db'),
+ (0x33C9, 'M', 'gy'),
+ (0x33CA, 'M', 'ha'),
+ (0x33CB, 'M', 'hp'),
+ (0x33CC, 'M', 'in'),
+ (0x33CD, 'M', 'kk'),
+ (0x33CE, 'M', 'km'),
+ (0x33CF, 'M', 'kt'),
+ (0x33D0, 'M', 'lm'),
+ (0x33D1, 'M', 'ln'),
+ (0x33D2, 'M', 'log'),
+ (0x33D3, 'M', 'lx'),
+ (0x33D4, 'M', 'mb'),
+ (0x33D5, 'M', 'mil'),
+ (0x33D6, 'M', 'mol'),
+ (0x33D7, 'M', 'ph'),
+ (0x33D8, 'X'),
+ (0x33D9, 'M', 'ppm'),
+ (0x33DA, 'M', 'pr'),
+ (0x33DB, 'M', 'sr'),
+ (0x33DC, 'M', 'sv'),
+ (0x33DD, 'M', 'wb'),
+ (0x33DE, 'M', 'v∕m'),
+ (0x33DF, 'M', 'a∕m'),
+ (0x33E0, 'M', '1日'),
+ (0x33E1, 'M', '2日'),
+ ]
+
+def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x33E2, 'M', '3日'),
+ (0x33E3, 'M', '4日'),
+ (0x33E4, 'M', '5日'),
+ (0x33E5, 'M', '6日'),
+ (0x33E6, 'M', '7日'),
+ (0x33E7, 'M', '8日'),
+ (0x33E8, 'M', '9日'),
+ (0x33E9, 'M', '10日'),
+ (0x33EA, 'M', '11日'),
+ (0x33EB, 'M', '12日'),
+ (0x33EC, 'M', '13日'),
+ (0x33ED, 'M', '14日'),
+ (0x33EE, 'M', '15日'),
+ (0x33EF, 'M', '16日'),
+ (0x33F0, 'M', '17日'),
+ (0x33F1, 'M', '18日'),
+ (0x33F2, 'M', '19日'),
+ (0x33F3, 'M', '20日'),
+ (0x33F4, 'M', '21日'),
+ (0x33F5, 'M', '22日'),
+ (0x33F6, 'M', '23日'),
+ (0x33F7, 'M', '24日'),
+ (0x33F8, 'M', '25日'),
+ (0x33F9, 'M', '26日'),
+ (0x33FA, 'M', '27日'),
+ (0x33FB, 'M', '28日'),
+ (0x33FC, 'M', '29日'),
+ (0x33FD, 'M', '30日'),
+ (0x33FE, 'M', '31日'),
+ (0x33FF, 'M', 'gal'),
+ (0x3400, 'V'),
+ (0xA48D, 'X'),
+ (0xA490, 'V'),
+ (0xA4C7, 'X'),
+ (0xA4D0, 'V'),
+ (0xA62C, 'X'),
+ (0xA640, 'M', 'ꙁ'),
+ (0xA641, 'V'),
+ (0xA642, 'M', 'ꙃ'),
+ (0xA643, 'V'),
+ (0xA644, 'M', 'ꙅ'),
+ (0xA645, 'V'),
+ (0xA646, 'M', 'ꙇ'),
+ (0xA647, 'V'),
+ (0xA648, 'M', 'ꙉ'),
+ (0xA649, 'V'),
+ (0xA64A, 'M', 'ꙋ'),
+ (0xA64B, 'V'),
+ (0xA64C, 'M', 'ꙍ'),
+ (0xA64D, 'V'),
+ (0xA64E, 'M', 'ꙏ'),
+ (0xA64F, 'V'),
+ (0xA650, 'M', 'ꙑ'),
+ (0xA651, 'V'),
+ (0xA652, 'M', 'ꙓ'),
+ (0xA653, 'V'),
+ (0xA654, 'M', 'ꙕ'),
+ (0xA655, 'V'),
+ (0xA656, 'M', 'ꙗ'),
+ (0xA657, 'V'),
+ (0xA658, 'M', 'ꙙ'),
+ (0xA659, 'V'),
+ (0xA65A, 'M', 'ꙛ'),
+ (0xA65B, 'V'),
+ (0xA65C, 'M', 'ꙝ'),
+ (0xA65D, 'V'),
+ (0xA65E, 'M', 'ꙟ'),
+ (0xA65F, 'V'),
+ (0xA660, 'M', 'ꙡ'),
+ (0xA661, 'V'),
+ (0xA662, 'M', 'ꙣ'),
+ (0xA663, 'V'),
+ (0xA664, 'M', 'ꙥ'),
+ (0xA665, 'V'),
+ (0xA666, 'M', 'ꙧ'),
+ (0xA667, 'V'),
+ (0xA668, 'M', 'ꙩ'),
+ (0xA669, 'V'),
+ (0xA66A, 'M', 'ꙫ'),
+ (0xA66B, 'V'),
+ (0xA66C, 'M', 'ꙭ'),
+ (0xA66D, 'V'),
+ (0xA680, 'M', 'ꚁ'),
+ (0xA681, 'V'),
+ (0xA682, 'M', 'ꚃ'),
+ (0xA683, 'V'),
+ (0xA684, 'M', 'ꚅ'),
+ (0xA685, 'V'),
+ (0xA686, 'M', 'ꚇ'),
+ (0xA687, 'V'),
+ (0xA688, 'M', 'ꚉ'),
+ (0xA689, 'V'),
+ (0xA68A, 'M', 'ꚋ'),
+ (0xA68B, 'V'),
+ (0xA68C, 'M', 'ꚍ'),
+ (0xA68D, 'V'),
+ (0xA68E, 'M', 'ꚏ'),
+ (0xA68F, 'V'),
+ (0xA690, 'M', 'ꚑ'),
+ (0xA691, 'V'),
+ ]
+
+def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xA692, 'M', 'ꚓ'),
+ (0xA693, 'V'),
+ (0xA694, 'M', 'ꚕ'),
+ (0xA695, 'V'),
+ (0xA696, 'M', 'ꚗ'),
+ (0xA697, 'V'),
+ (0xA698, 'M', 'ꚙ'),
+ (0xA699, 'V'),
+ (0xA69A, 'M', 'ꚛ'),
+ (0xA69B, 'V'),
+ (0xA69C, 'M', 'ъ'),
+ (0xA69D, 'M', 'ь'),
+ (0xA69E, 'V'),
+ (0xA6F8, 'X'),
+ (0xA700, 'V'),
+ (0xA722, 'M', 'ꜣ'),
+ (0xA723, 'V'),
+ (0xA724, 'M', 'ꜥ'),
+ (0xA725, 'V'),
+ (0xA726, 'M', 'ꜧ'),
+ (0xA727, 'V'),
+ (0xA728, 'M', 'ꜩ'),
+ (0xA729, 'V'),
+ (0xA72A, 'M', 'ꜫ'),
+ (0xA72B, 'V'),
+ (0xA72C, 'M', 'ꜭ'),
+ (0xA72D, 'V'),
+ (0xA72E, 'M', 'ꜯ'),
+ (0xA72F, 'V'),
+ (0xA732, 'M', 'ꜳ'),
+ (0xA733, 'V'),
+ (0xA734, 'M', 'ꜵ'),
+ (0xA735, 'V'),
+ (0xA736, 'M', 'ꜷ'),
+ (0xA737, 'V'),
+ (0xA738, 'M', 'ꜹ'),
+ (0xA739, 'V'),
+ (0xA73A, 'M', 'ꜻ'),
+ (0xA73B, 'V'),
+ (0xA73C, 'M', 'ꜽ'),
+ (0xA73D, 'V'),
+ (0xA73E, 'M', 'ꜿ'),
+ (0xA73F, 'V'),
+ (0xA740, 'M', 'ꝁ'),
+ (0xA741, 'V'),
+ (0xA742, 'M', 'ꝃ'),
+ (0xA743, 'V'),
+ (0xA744, 'M', 'ꝅ'),
+ (0xA745, 'V'),
+ (0xA746, 'M', 'ꝇ'),
+ (0xA747, 'V'),
+ (0xA748, 'M', 'ꝉ'),
+ (0xA749, 'V'),
+ (0xA74A, 'M', 'ꝋ'),
+ (0xA74B, 'V'),
+ (0xA74C, 'M', 'ꝍ'),
+ (0xA74D, 'V'),
+ (0xA74E, 'M', 'ꝏ'),
+ (0xA74F, 'V'),
+ (0xA750, 'M', 'ꝑ'),
+ (0xA751, 'V'),
+ (0xA752, 'M', 'ꝓ'),
+ (0xA753, 'V'),
+ (0xA754, 'M', 'ꝕ'),
+ (0xA755, 'V'),
+ (0xA756, 'M', 'ꝗ'),
+ (0xA757, 'V'),
+ (0xA758, 'M', 'ꝙ'),
+ (0xA759, 'V'),
+ (0xA75A, 'M', 'ꝛ'),
+ (0xA75B, 'V'),
+ (0xA75C, 'M', 'ꝝ'),
+ (0xA75D, 'V'),
+ (0xA75E, 'M', 'ꝟ'),
+ (0xA75F, 'V'),
+ (0xA760, 'M', 'ꝡ'),
+ (0xA761, 'V'),
+ (0xA762, 'M', 'ꝣ'),
+ (0xA763, 'V'),
+ (0xA764, 'M', 'ꝥ'),
+ (0xA765, 'V'),
+ (0xA766, 'M', 'ꝧ'),
+ (0xA767, 'V'),
+ (0xA768, 'M', 'ꝩ'),
+ (0xA769, 'V'),
+ (0xA76A, 'M', 'ꝫ'),
+ (0xA76B, 'V'),
+ (0xA76C, 'M', 'ꝭ'),
+ (0xA76D, 'V'),
+ (0xA76E, 'M', 'ꝯ'),
+ (0xA76F, 'V'),
+ (0xA770, 'M', 'ꝯ'),
+ (0xA771, 'V'),
+ (0xA779, 'M', 'ꝺ'),
+ (0xA77A, 'V'),
+ (0xA77B, 'M', 'ꝼ'),
+ (0xA77C, 'V'),
+ (0xA77D, 'M', 'ᵹ'),
+ (0xA77E, 'M', 'ꝿ'),
+ (0xA77F, 'V'),
+ ]
+
+def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xA780, 'M', 'ꞁ'),
+ (0xA781, 'V'),
+ (0xA782, 'M', 'ꞃ'),
+ (0xA783, 'V'),
+ (0xA784, 'M', 'ꞅ'),
+ (0xA785, 'V'),
+ (0xA786, 'M', 'ꞇ'),
+ (0xA787, 'V'),
+ (0xA78B, 'M', 'ꞌ'),
+ (0xA78C, 'V'),
+ (0xA78D, 'M', 'ɥ'),
+ (0xA78E, 'V'),
+ (0xA790, 'M', 'ꞑ'),
+ (0xA791, 'V'),
+ (0xA792, 'M', 'ꞓ'),
+ (0xA793, 'V'),
+ (0xA796, 'M', 'ꞗ'),
+ (0xA797, 'V'),
+ (0xA798, 'M', 'ꞙ'),
+ (0xA799, 'V'),
+ (0xA79A, 'M', 'ꞛ'),
+ (0xA79B, 'V'),
+ (0xA79C, 'M', 'ꞝ'),
+ (0xA79D, 'V'),
+ (0xA79E, 'M', 'ꞟ'),
+ (0xA79F, 'V'),
+ (0xA7A0, 'M', 'ꞡ'),
+ (0xA7A1, 'V'),
+ (0xA7A2, 'M', 'ꞣ'),
+ (0xA7A3, 'V'),
+ (0xA7A4, 'M', 'ꞥ'),
+ (0xA7A5, 'V'),
+ (0xA7A6, 'M', 'ꞧ'),
+ (0xA7A7, 'V'),
+ (0xA7A8, 'M', 'ꞩ'),
+ (0xA7A9, 'V'),
+ (0xA7AA, 'M', 'ɦ'),
+ (0xA7AB, 'M', 'ɜ'),
+ (0xA7AC, 'M', 'ɡ'),
+ (0xA7AD, 'M', 'ɬ'),
+ (0xA7AE, 'M', 'ɪ'),
+ (0xA7AF, 'V'),
+ (0xA7B0, 'M', 'ʞ'),
+ (0xA7B1, 'M', 'ʇ'),
+ (0xA7B2, 'M', 'ʝ'),
+ (0xA7B3, 'M', 'ꭓ'),
+ (0xA7B4, 'M', 'ꞵ'),
+ (0xA7B5, 'V'),
+ (0xA7B6, 'M', 'ꞷ'),
+ (0xA7B7, 'V'),
+ (0xA7B8, 'M', 'ꞹ'),
+ (0xA7B9, 'V'),
+ (0xA7BA, 'M', 'ꞻ'),
+ (0xA7BB, 'V'),
+ (0xA7BC, 'M', 'ꞽ'),
+ (0xA7BD, 'V'),
+ (0xA7BE, 'M', 'ꞿ'),
+ (0xA7BF, 'V'),
+ (0xA7C0, 'M', 'ꟁ'),
+ (0xA7C1, 'V'),
+ (0xA7C2, 'M', 'ꟃ'),
+ (0xA7C3, 'V'),
+ (0xA7C4, 'M', 'ꞔ'),
+ (0xA7C5, 'M', 'ʂ'),
+ (0xA7C6, 'M', 'ᶎ'),
+ (0xA7C7, 'M', 'ꟈ'),
+ (0xA7C8, 'V'),
+ (0xA7C9, 'M', 'ꟊ'),
+ (0xA7CA, 'V'),
+ (0xA7CB, 'X'),
+ (0xA7D0, 'M', 'ꟑ'),
+ (0xA7D1, 'V'),
+ (0xA7D2, 'X'),
+ (0xA7D3, 'V'),
+ (0xA7D4, 'X'),
+ (0xA7D5, 'V'),
+ (0xA7D6, 'M', 'ꟗ'),
+ (0xA7D7, 'V'),
+ (0xA7D8, 'M', 'ꟙ'),
+ (0xA7D9, 'V'),
+ (0xA7DA, 'X'),
+ (0xA7F2, 'M', 'c'),
+ (0xA7F3, 'M', 'f'),
+ (0xA7F4, 'M', 'q'),
+ (0xA7F5, 'M', 'ꟶ'),
+ (0xA7F6, 'V'),
+ (0xA7F8, 'M', 'ħ'),
+ (0xA7F9, 'M', 'œ'),
+ (0xA7FA, 'V'),
+ (0xA82D, 'X'),
+ (0xA830, 'V'),
+ (0xA83A, 'X'),
+ (0xA840, 'V'),
+ (0xA878, 'X'),
+ (0xA880, 'V'),
+ (0xA8C6, 'X'),
+ (0xA8CE, 'V'),
+ (0xA8DA, 'X'),
+ (0xA8E0, 'V'),
+ (0xA954, 'X'),
+ ]
+
+def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xA95F, 'V'),
+ (0xA97D, 'X'),
+ (0xA980, 'V'),
+ (0xA9CE, 'X'),
+ (0xA9CF, 'V'),
+ (0xA9DA, 'X'),
+ (0xA9DE, 'V'),
+ (0xA9FF, 'X'),
+ (0xAA00, 'V'),
+ (0xAA37, 'X'),
+ (0xAA40, 'V'),
+ (0xAA4E, 'X'),
+ (0xAA50, 'V'),
+ (0xAA5A, 'X'),
+ (0xAA5C, 'V'),
+ (0xAAC3, 'X'),
+ (0xAADB, 'V'),
+ (0xAAF7, 'X'),
+ (0xAB01, 'V'),
+ (0xAB07, 'X'),
+ (0xAB09, 'V'),
+ (0xAB0F, 'X'),
+ (0xAB11, 'V'),
+ (0xAB17, 'X'),
+ (0xAB20, 'V'),
+ (0xAB27, 'X'),
+ (0xAB28, 'V'),
+ (0xAB2F, 'X'),
+ (0xAB30, 'V'),
+ (0xAB5C, 'M', 'ꜧ'),
+ (0xAB5D, 'M', 'ꬷ'),
+ (0xAB5E, 'M', 'ɫ'),
+ (0xAB5F, 'M', 'ꭒ'),
+ (0xAB60, 'V'),
+ (0xAB69, 'M', 'ʍ'),
+ (0xAB6A, 'V'),
+ (0xAB6C, 'X'),
+ (0xAB70, 'M', 'Ꭰ'),
+ (0xAB71, 'M', 'Ꭱ'),
+ (0xAB72, 'M', 'Ꭲ'),
+ (0xAB73, 'M', 'Ꭳ'),
+ (0xAB74, 'M', 'Ꭴ'),
+ (0xAB75, 'M', 'Ꭵ'),
+ (0xAB76, 'M', 'Ꭶ'),
+ (0xAB77, 'M', 'Ꭷ'),
+ (0xAB78, 'M', 'Ꭸ'),
+ (0xAB79, 'M', 'Ꭹ'),
+ (0xAB7A, 'M', 'Ꭺ'),
+ (0xAB7B, 'M', 'Ꭻ'),
+ (0xAB7C, 'M', 'Ꭼ'),
+ (0xAB7D, 'M', 'Ꭽ'),
+ (0xAB7E, 'M', 'Ꭾ'),
+ (0xAB7F, 'M', 'Ꭿ'),
+ (0xAB80, 'M', 'Ꮀ'),
+ (0xAB81, 'M', 'Ꮁ'),
+ (0xAB82, 'M', 'Ꮂ'),
+ (0xAB83, 'M', 'Ꮃ'),
+ (0xAB84, 'M', 'Ꮄ'),
+ (0xAB85, 'M', 'Ꮅ'),
+ (0xAB86, 'M', 'Ꮆ'),
+ (0xAB87, 'M', 'Ꮇ'),
+ (0xAB88, 'M', 'Ꮈ'),
+ (0xAB89, 'M', 'Ꮉ'),
+ (0xAB8A, 'M', 'Ꮊ'),
+ (0xAB8B, 'M', 'Ꮋ'),
+ (0xAB8C, 'M', 'Ꮌ'),
+ (0xAB8D, 'M', 'Ꮍ'),
+ (0xAB8E, 'M', 'Ꮎ'),
+ (0xAB8F, 'M', 'Ꮏ'),
+ (0xAB90, 'M', 'Ꮐ'),
+ (0xAB91, 'M', 'Ꮑ'),
+ (0xAB92, 'M', 'Ꮒ'),
+ (0xAB93, 'M', 'Ꮓ'),
+ (0xAB94, 'M', 'Ꮔ'),
+ (0xAB95, 'M', 'Ꮕ'),
+ (0xAB96, 'M', 'Ꮖ'),
+ (0xAB97, 'M', 'Ꮗ'),
+ (0xAB98, 'M', 'Ꮘ'),
+ (0xAB99, 'M', 'Ꮙ'),
+ (0xAB9A, 'M', 'Ꮚ'),
+ (0xAB9B, 'M', 'Ꮛ'),
+ (0xAB9C, 'M', 'Ꮜ'),
+ (0xAB9D, 'M', 'Ꮝ'),
+ (0xAB9E, 'M', 'Ꮞ'),
+ (0xAB9F, 'M', 'Ꮟ'),
+ (0xABA0, 'M', 'Ꮠ'),
+ (0xABA1, 'M', 'Ꮡ'),
+ (0xABA2, 'M', 'Ꮢ'),
+ (0xABA3, 'M', 'Ꮣ'),
+ (0xABA4, 'M', 'Ꮤ'),
+ (0xABA5, 'M', 'Ꮥ'),
+ (0xABA6, 'M', 'Ꮦ'),
+ (0xABA7, 'M', 'Ꮧ'),
+ (0xABA8, 'M', 'Ꮨ'),
+ (0xABA9, 'M', 'Ꮩ'),
+ (0xABAA, 'M', 'Ꮪ'),
+ (0xABAB, 'M', 'Ꮫ'),
+ (0xABAC, 'M', 'Ꮬ'),
+ (0xABAD, 'M', 'Ꮭ'),
+ (0xABAE, 'M', 'Ꮮ'),
+ ]
+
+def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xABAF, 'M', 'Ꮯ'),
+ (0xABB0, 'M', 'Ꮰ'),
+ (0xABB1, 'M', 'Ꮱ'),
+ (0xABB2, 'M', 'Ꮲ'),
+ (0xABB3, 'M', 'Ꮳ'),
+ (0xABB4, 'M', 'Ꮴ'),
+ (0xABB5, 'M', 'Ꮵ'),
+ (0xABB6, 'M', 'Ꮶ'),
+ (0xABB7, 'M', 'Ꮷ'),
+ (0xABB8, 'M', 'Ꮸ'),
+ (0xABB9, 'M', 'Ꮹ'),
+ (0xABBA, 'M', 'Ꮺ'),
+ (0xABBB, 'M', 'Ꮻ'),
+ (0xABBC, 'M', 'Ꮼ'),
+ (0xABBD, 'M', 'Ꮽ'),
+ (0xABBE, 'M', 'Ꮾ'),
+ (0xABBF, 'M', 'Ꮿ'),
+ (0xABC0, 'V'),
+ (0xABEE, 'X'),
+ (0xABF0, 'V'),
+ (0xABFA, 'X'),
+ (0xAC00, 'V'),
+ (0xD7A4, 'X'),
+ (0xD7B0, 'V'),
+ (0xD7C7, 'X'),
+ (0xD7CB, 'V'),
+ (0xD7FC, 'X'),
+ (0xF900, 'M', '豈'),
+ (0xF901, 'M', '更'),
+ (0xF902, 'M', '車'),
+ (0xF903, 'M', '賈'),
+ (0xF904, 'M', '滑'),
+ (0xF905, 'M', '串'),
+ (0xF906, 'M', '句'),
+ (0xF907, 'M', '龜'),
+ (0xF909, 'M', '契'),
+ (0xF90A, 'M', '金'),
+ (0xF90B, 'M', '喇'),
+ (0xF90C, 'M', '奈'),
+ (0xF90D, 'M', '懶'),
+ (0xF90E, 'M', '癩'),
+ (0xF90F, 'M', '羅'),
+ (0xF910, 'M', '蘿'),
+ (0xF911, 'M', '螺'),
+ (0xF912, 'M', '裸'),
+ (0xF913, 'M', '邏'),
+ (0xF914, 'M', '樂'),
+ (0xF915, 'M', '洛'),
+ (0xF916, 'M', '烙'),
+ (0xF917, 'M', '珞'),
+ (0xF918, 'M', '落'),
+ (0xF919, 'M', '酪'),
+ (0xF91A, 'M', '駱'),
+ (0xF91B, 'M', '亂'),
+ (0xF91C, 'M', '卵'),
+ (0xF91D, 'M', '欄'),
+ (0xF91E, 'M', '爛'),
+ (0xF91F, 'M', '蘭'),
+ (0xF920, 'M', '鸞'),
+ (0xF921, 'M', '嵐'),
+ (0xF922, 'M', '濫'),
+ (0xF923, 'M', '藍'),
+ (0xF924, 'M', '襤'),
+ (0xF925, 'M', '拉'),
+ (0xF926, 'M', '臘'),
+ (0xF927, 'M', '蠟'),
+ (0xF928, 'M', '廊'),
+ (0xF929, 'M', '朗'),
+ (0xF92A, 'M', '浪'),
+ (0xF92B, 'M', '狼'),
+ (0xF92C, 'M', '郎'),
+ (0xF92D, 'M', '來'),
+ (0xF92E, 'M', '冷'),
+ (0xF92F, 'M', '勞'),
+ (0xF930, 'M', '擄'),
+ (0xF931, 'M', '櫓'),
+ (0xF932, 'M', '爐'),
+ (0xF933, 'M', '盧'),
+ (0xF934, 'M', '老'),
+ (0xF935, 'M', '蘆'),
+ (0xF936, 'M', '虜'),
+ (0xF937, 'M', '路'),
+ (0xF938, 'M', '露'),
+ (0xF939, 'M', '魯'),
+ (0xF93A, 'M', '鷺'),
+ (0xF93B, 'M', '碌'),
+ (0xF93C, 'M', '祿'),
+ (0xF93D, 'M', '綠'),
+ (0xF93E, 'M', '菉'),
+ (0xF93F, 'M', '錄'),
+ (0xF940, 'M', '鹿'),
+ (0xF941, 'M', '論'),
+ (0xF942, 'M', '壟'),
+ (0xF943, 'M', '弄'),
+ (0xF944, 'M', '籠'),
+ (0xF945, 'M', '聾'),
+ (0xF946, 'M', '牢'),
+ (0xF947, 'M', '磊'),
+ (0xF948, 'M', '賂'),
+ (0xF949, 'M', '雷'),
+ ]
+
+def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xF94A, 'M', '壘'),
+ (0xF94B, 'M', '屢'),
+ (0xF94C, 'M', '樓'),
+ (0xF94D, 'M', '淚'),
+ (0xF94E, 'M', '漏'),
+ (0xF94F, 'M', '累'),
+ (0xF950, 'M', '縷'),
+ (0xF951, 'M', '陋'),
+ (0xF952, 'M', '勒'),
+ (0xF953, 'M', '肋'),
+ (0xF954, 'M', '凜'),
+ (0xF955, 'M', '凌'),
+ (0xF956, 'M', '稜'),
+ (0xF957, 'M', '綾'),
+ (0xF958, 'M', '菱'),
+ (0xF959, 'M', '陵'),
+ (0xF95A, 'M', '讀'),
+ (0xF95B, 'M', '拏'),
+ (0xF95C, 'M', '樂'),
+ (0xF95D, 'M', '諾'),
+ (0xF95E, 'M', '丹'),
+ (0xF95F, 'M', '寧'),
+ (0xF960, 'M', '怒'),
+ (0xF961, 'M', '率'),
+ (0xF962, 'M', '異'),
+ (0xF963, 'M', '北'),
+ (0xF964, 'M', '磻'),
+ (0xF965, 'M', '便'),
+ (0xF966, 'M', '復'),
+ (0xF967, 'M', '不'),
+ (0xF968, 'M', '泌'),
+ (0xF969, 'M', '數'),
+ (0xF96A, 'M', '索'),
+ (0xF96B, 'M', '參'),
+ (0xF96C, 'M', '塞'),
+ (0xF96D, 'M', '省'),
+ (0xF96E, 'M', '葉'),
+ (0xF96F, 'M', '說'),
+ (0xF970, 'M', '殺'),
+ (0xF971, 'M', '辰'),
+ (0xF972, 'M', '沈'),
+ (0xF973, 'M', '拾'),
+ (0xF974, 'M', '若'),
+ (0xF975, 'M', '掠'),
+ (0xF976, 'M', '略'),
+ (0xF977, 'M', '亮'),
+ (0xF978, 'M', '兩'),
+ (0xF979, 'M', '凉'),
+ (0xF97A, 'M', '梁'),
+ (0xF97B, 'M', '糧'),
+ (0xF97C, 'M', '良'),
+ (0xF97D, 'M', '諒'),
+ (0xF97E, 'M', '量'),
+ (0xF97F, 'M', '勵'),
+ (0xF980, 'M', '呂'),
+ (0xF981, 'M', '女'),
+ (0xF982, 'M', '廬'),
+ (0xF983, 'M', '旅'),
+ (0xF984, 'M', '濾'),
+ (0xF985, 'M', '礪'),
+ (0xF986, 'M', '閭'),
+ (0xF987, 'M', '驪'),
+ (0xF988, 'M', '麗'),
+ (0xF989, 'M', '黎'),
+ (0xF98A, 'M', '力'),
+ (0xF98B, 'M', '曆'),
+ (0xF98C, 'M', '歷'),
+ (0xF98D, 'M', '轢'),
+ (0xF98E, 'M', '年'),
+ (0xF98F, 'M', '憐'),
+ (0xF990, 'M', '戀'),
+ (0xF991, 'M', '撚'),
+ (0xF992, 'M', '漣'),
+ (0xF993, 'M', '煉'),
+ (0xF994, 'M', '璉'),
+ (0xF995, 'M', '秊'),
+ (0xF996, 'M', '練'),
+ (0xF997, 'M', '聯'),
+ (0xF998, 'M', '輦'),
+ (0xF999, 'M', '蓮'),
+ (0xF99A, 'M', '連'),
+ (0xF99B, 'M', '鍊'),
+ (0xF99C, 'M', '列'),
+ (0xF99D, 'M', '劣'),
+ (0xF99E, 'M', '咽'),
+ (0xF99F, 'M', '烈'),
+ (0xF9A0, 'M', '裂'),
+ (0xF9A1, 'M', '說'),
+ (0xF9A2, 'M', '廉'),
+ (0xF9A3, 'M', '念'),
+ (0xF9A4, 'M', '捻'),
+ (0xF9A5, 'M', '殮'),
+ (0xF9A6, 'M', '簾'),
+ (0xF9A7, 'M', '獵'),
+ (0xF9A8, 'M', '令'),
+ (0xF9A9, 'M', '囹'),
+ (0xF9AA, 'M', '寧'),
+ (0xF9AB, 'M', '嶺'),
+ (0xF9AC, 'M', '怜'),
+ (0xF9AD, 'M', '玲'),
+ ]
+
+def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xF9AE, 'M', '瑩'),
+ (0xF9AF, 'M', '羚'),
+ (0xF9B0, 'M', '聆'),
+ (0xF9B1, 'M', '鈴'),
+ (0xF9B2, 'M', '零'),
+ (0xF9B3, 'M', '靈'),
+ (0xF9B4, 'M', '領'),
+ (0xF9B5, 'M', '例'),
+ (0xF9B6, 'M', '禮'),
+ (0xF9B7, 'M', '醴'),
+ (0xF9B8, 'M', '隸'),
+ (0xF9B9, 'M', '惡'),
+ (0xF9BA, 'M', '了'),
+ (0xF9BB, 'M', '僚'),
+ (0xF9BC, 'M', '寮'),
+ (0xF9BD, 'M', '尿'),
+ (0xF9BE, 'M', '料'),
+ (0xF9BF, 'M', '樂'),
+ (0xF9C0, 'M', '燎'),
+ (0xF9C1, 'M', '療'),
+ (0xF9C2, 'M', '蓼'),
+ (0xF9C3, 'M', '遼'),
+ (0xF9C4, 'M', '龍'),
+ (0xF9C5, 'M', '暈'),
+ (0xF9C6, 'M', '阮'),
+ (0xF9C7, 'M', '劉'),
+ (0xF9C8, 'M', '杻'),
+ (0xF9C9, 'M', '柳'),
+ (0xF9CA, 'M', '流'),
+ (0xF9CB, 'M', '溜'),
+ (0xF9CC, 'M', '琉'),
+ (0xF9CD, 'M', '留'),
+ (0xF9CE, 'M', '硫'),
+ (0xF9CF, 'M', '紐'),
+ (0xF9D0, 'M', '類'),
+ (0xF9D1, 'M', '六'),
+ (0xF9D2, 'M', '戮'),
+ (0xF9D3, 'M', '陸'),
+ (0xF9D4, 'M', '倫'),
+ (0xF9D5, 'M', '崙'),
+ (0xF9D6, 'M', '淪'),
+ (0xF9D7, 'M', '輪'),
+ (0xF9D8, 'M', '律'),
+ (0xF9D9, 'M', '慄'),
+ (0xF9DA, 'M', '栗'),
+ (0xF9DB, 'M', '率'),
+ (0xF9DC, 'M', '隆'),
+ (0xF9DD, 'M', '利'),
+ (0xF9DE, 'M', '吏'),
+ (0xF9DF, 'M', '履'),
+ (0xF9E0, 'M', '易'),
+ (0xF9E1, 'M', '李'),
+ (0xF9E2, 'M', '梨'),
+ (0xF9E3, 'M', '泥'),
+ (0xF9E4, 'M', '理'),
+ (0xF9E5, 'M', '痢'),
+ (0xF9E6, 'M', '罹'),
+ (0xF9E7, 'M', '裏'),
+ (0xF9E8, 'M', '裡'),
+ (0xF9E9, 'M', '里'),
+ (0xF9EA, 'M', '離'),
+ (0xF9EB, 'M', '匿'),
+ (0xF9EC, 'M', '溺'),
+ (0xF9ED, 'M', '吝'),
+ (0xF9EE, 'M', '燐'),
+ (0xF9EF, 'M', '璘'),
+ (0xF9F0, 'M', '藺'),
+ (0xF9F1, 'M', '隣'),
+ (0xF9F2, 'M', '鱗'),
+ (0xF9F3, 'M', '麟'),
+ (0xF9F4, 'M', '林'),
+ (0xF9F5, 'M', '淋'),
+ (0xF9F6, 'M', '臨'),
+ (0xF9F7, 'M', '立'),
+ (0xF9F8, 'M', '笠'),
+ (0xF9F9, 'M', '粒'),
+ (0xF9FA, 'M', '狀'),
+ (0xF9FB, 'M', '炙'),
+ (0xF9FC, 'M', '識'),
+ (0xF9FD, 'M', '什'),
+ (0xF9FE, 'M', '茶'),
+ (0xF9FF, 'M', '刺'),
+ (0xFA00, 'M', '切'),
+ (0xFA01, 'M', '度'),
+ (0xFA02, 'M', '拓'),
+ (0xFA03, 'M', '糖'),
+ (0xFA04, 'M', '宅'),
+ (0xFA05, 'M', '洞'),
+ (0xFA06, 'M', '暴'),
+ (0xFA07, 'M', '輻'),
+ (0xFA08, 'M', '行'),
+ (0xFA09, 'M', '降'),
+ (0xFA0A, 'M', '見'),
+ (0xFA0B, 'M', '廓'),
+ (0xFA0C, 'M', '兀'),
+ (0xFA0D, 'M', '嗀'),
+ (0xFA0E, 'V'),
+ (0xFA10, 'M', '塚'),
+ (0xFA11, 'V'),
+ (0xFA12, 'M', '晴'),
+ ]
+
+def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFA13, 'V'),
+ (0xFA15, 'M', '凞'),
+ (0xFA16, 'M', '猪'),
+ (0xFA17, 'M', '益'),
+ (0xFA18, 'M', '礼'),
+ (0xFA19, 'M', '神'),
+ (0xFA1A, 'M', '祥'),
+ (0xFA1B, 'M', '福'),
+ (0xFA1C, 'M', '靖'),
+ (0xFA1D, 'M', '精'),
+ (0xFA1E, 'M', '羽'),
+ (0xFA1F, 'V'),
+ (0xFA20, 'M', '蘒'),
+ (0xFA21, 'V'),
+ (0xFA22, 'M', '諸'),
+ (0xFA23, 'V'),
+ (0xFA25, 'M', '逸'),
+ (0xFA26, 'M', '都'),
+ (0xFA27, 'V'),
+ (0xFA2A, 'M', '飯'),
+ (0xFA2B, 'M', '飼'),
+ (0xFA2C, 'M', '館'),
+ (0xFA2D, 'M', '鶴'),
+ (0xFA2E, 'M', '郞'),
+ (0xFA2F, 'M', '隷'),
+ (0xFA30, 'M', '侮'),
+ (0xFA31, 'M', '僧'),
+ (0xFA32, 'M', '免'),
+ (0xFA33, 'M', '勉'),
+ (0xFA34, 'M', '勤'),
+ (0xFA35, 'M', '卑'),
+ (0xFA36, 'M', '喝'),
+ (0xFA37, 'M', '嘆'),
+ (0xFA38, 'M', '器'),
+ (0xFA39, 'M', '塀'),
+ (0xFA3A, 'M', '墨'),
+ (0xFA3B, 'M', '層'),
+ (0xFA3C, 'M', '屮'),
+ (0xFA3D, 'M', '悔'),
+ (0xFA3E, 'M', '慨'),
+ (0xFA3F, 'M', '憎'),
+ (0xFA40, 'M', '懲'),
+ (0xFA41, 'M', '敏'),
+ (0xFA42, 'M', '既'),
+ (0xFA43, 'M', '暑'),
+ (0xFA44, 'M', '梅'),
+ (0xFA45, 'M', '海'),
+ (0xFA46, 'M', '渚'),
+ (0xFA47, 'M', '漢'),
+ (0xFA48, 'M', '煮'),
+ (0xFA49, 'M', '爫'),
+ (0xFA4A, 'M', '琢'),
+ (0xFA4B, 'M', '碑'),
+ (0xFA4C, 'M', '社'),
+ (0xFA4D, 'M', '祉'),
+ (0xFA4E, 'M', '祈'),
+ (0xFA4F, 'M', '祐'),
+ (0xFA50, 'M', '祖'),
+ (0xFA51, 'M', '祝'),
+ (0xFA52, 'M', '禍'),
+ (0xFA53, 'M', '禎'),
+ (0xFA54, 'M', '穀'),
+ (0xFA55, 'M', '突'),
+ (0xFA56, 'M', '節'),
+ (0xFA57, 'M', '練'),
+ (0xFA58, 'M', '縉'),
+ (0xFA59, 'M', '繁'),
+ (0xFA5A, 'M', '署'),
+ (0xFA5B, 'M', '者'),
+ (0xFA5C, 'M', '臭'),
+ (0xFA5D, 'M', '艹'),
+ (0xFA5F, 'M', '著'),
+ (0xFA60, 'M', '褐'),
+ (0xFA61, 'M', '視'),
+ (0xFA62, 'M', '謁'),
+ (0xFA63, 'M', '謹'),
+ (0xFA64, 'M', '賓'),
+ (0xFA65, 'M', '贈'),
+ (0xFA66, 'M', '辶'),
+ (0xFA67, 'M', '逸'),
+ (0xFA68, 'M', '難'),
+ (0xFA69, 'M', '響'),
+ (0xFA6A, 'M', '頻'),
+ (0xFA6B, 'M', '恵'),
+ (0xFA6C, 'M', '𤋮'),
+ (0xFA6D, 'M', '舘'),
+ (0xFA6E, 'X'),
+ (0xFA70, 'M', '並'),
+ (0xFA71, 'M', '况'),
+ (0xFA72, 'M', '全'),
+ (0xFA73, 'M', '侀'),
+ (0xFA74, 'M', '充'),
+ (0xFA75, 'M', '冀'),
+ (0xFA76, 'M', '勇'),
+ (0xFA77, 'M', '勺'),
+ (0xFA78, 'M', '喝'),
+ (0xFA79, 'M', '啕'),
+ (0xFA7A, 'M', '喙'),
+ (0xFA7B, 'M', '嗢'),
+ (0xFA7C, 'M', '塚'),
+ ]
+
+def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFA7D, 'M', '墳'),
+ (0xFA7E, 'M', '奄'),
+ (0xFA7F, 'M', '奔'),
+ (0xFA80, 'M', '婢'),
+ (0xFA81, 'M', '嬨'),
+ (0xFA82, 'M', '廒'),
+ (0xFA83, 'M', '廙'),
+ (0xFA84, 'M', '彩'),
+ (0xFA85, 'M', '徭'),
+ (0xFA86, 'M', '惘'),
+ (0xFA87, 'M', '慎'),
+ (0xFA88, 'M', '愈'),
+ (0xFA89, 'M', '憎'),
+ (0xFA8A, 'M', '慠'),
+ (0xFA8B, 'M', '懲'),
+ (0xFA8C, 'M', '戴'),
+ (0xFA8D, 'M', '揄'),
+ (0xFA8E, 'M', '搜'),
+ (0xFA8F, 'M', '摒'),
+ (0xFA90, 'M', '敖'),
+ (0xFA91, 'M', '晴'),
+ (0xFA92, 'M', '朗'),
+ (0xFA93, 'M', '望'),
+ (0xFA94, 'M', '杖'),
+ (0xFA95, 'M', '歹'),
+ (0xFA96, 'M', '殺'),
+ (0xFA97, 'M', '流'),
+ (0xFA98, 'M', '滛'),
+ (0xFA99, 'M', '滋'),
+ (0xFA9A, 'M', '漢'),
+ (0xFA9B, 'M', '瀞'),
+ (0xFA9C, 'M', '煮'),
+ (0xFA9D, 'M', '瞧'),
+ (0xFA9E, 'M', '爵'),
+ (0xFA9F, 'M', '犯'),
+ (0xFAA0, 'M', '猪'),
+ (0xFAA1, 'M', '瑱'),
+ (0xFAA2, 'M', '甆'),
+ (0xFAA3, 'M', '画'),
+ (0xFAA4, 'M', '瘝'),
+ (0xFAA5, 'M', '瘟'),
+ (0xFAA6, 'M', '益'),
+ (0xFAA7, 'M', '盛'),
+ (0xFAA8, 'M', '直'),
+ (0xFAA9, 'M', '睊'),
+ (0xFAAA, 'M', '着'),
+ (0xFAAB, 'M', '磌'),
+ (0xFAAC, 'M', '窱'),
+ (0xFAAD, 'M', '節'),
+ (0xFAAE, 'M', '类'),
+ (0xFAAF, 'M', '絛'),
+ (0xFAB0, 'M', '練'),
+ (0xFAB1, 'M', '缾'),
+ (0xFAB2, 'M', '者'),
+ (0xFAB3, 'M', '荒'),
+ (0xFAB4, 'M', '華'),
+ (0xFAB5, 'M', '蝹'),
+ (0xFAB6, 'M', '襁'),
+ (0xFAB7, 'M', '覆'),
+ (0xFAB8, 'M', '視'),
+ (0xFAB9, 'M', '調'),
+ (0xFABA, 'M', '諸'),
+ (0xFABB, 'M', '請'),
+ (0xFABC, 'M', '謁'),
+ (0xFABD, 'M', '諾'),
+ (0xFABE, 'M', '諭'),
+ (0xFABF, 'M', '謹'),
+ (0xFAC0, 'M', '變'),
+ (0xFAC1, 'M', '贈'),
+ (0xFAC2, 'M', '輸'),
+ (0xFAC3, 'M', '遲'),
+ (0xFAC4, 'M', '醙'),
+ (0xFAC5, 'M', '鉶'),
+ (0xFAC6, 'M', '陼'),
+ (0xFAC7, 'M', '難'),
+ (0xFAC8, 'M', '靖'),
+ (0xFAC9, 'M', '韛'),
+ (0xFACA, 'M', '響'),
+ (0xFACB, 'M', '頋'),
+ (0xFACC, 'M', '頻'),
+ (0xFACD, 'M', '鬒'),
+ (0xFACE, 'M', '龜'),
+ (0xFACF, 'M', '𢡊'),
+ (0xFAD0, 'M', '𢡄'),
+ (0xFAD1, 'M', '𣏕'),
+ (0xFAD2, 'M', '㮝'),
+ (0xFAD3, 'M', '䀘'),
+ (0xFAD4, 'M', '䀹'),
+ (0xFAD5, 'M', '𥉉'),
+ (0xFAD6, 'M', '𥳐'),
+ (0xFAD7, 'M', '𧻓'),
+ (0xFAD8, 'M', '齃'),
+ (0xFAD9, 'M', '龎'),
+ (0xFADA, 'X'),
+ (0xFB00, 'M', 'ff'),
+ (0xFB01, 'M', 'fi'),
+ (0xFB02, 'M', 'fl'),
+ (0xFB03, 'M', 'ffi'),
+ (0xFB04, 'M', 'ffl'),
+ (0xFB05, 'M', 'st'),
+ ]
+
+def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFB07, 'X'),
+ (0xFB13, 'M', 'մն'),
+ (0xFB14, 'M', 'մե'),
+ (0xFB15, 'M', 'մի'),
+ (0xFB16, 'M', 'վն'),
+ (0xFB17, 'M', 'մխ'),
+ (0xFB18, 'X'),
+ (0xFB1D, 'M', 'יִ'),
+ (0xFB1E, 'V'),
+ (0xFB1F, 'M', 'ײַ'),
+ (0xFB20, 'M', 'ע'),
+ (0xFB21, 'M', 'א'),
+ (0xFB22, 'M', 'ד'),
+ (0xFB23, 'M', 'ה'),
+ (0xFB24, 'M', 'כ'),
+ (0xFB25, 'M', 'ל'),
+ (0xFB26, 'M', 'ם'),
+ (0xFB27, 'M', 'ר'),
+ (0xFB28, 'M', 'ת'),
+ (0xFB29, '3', '+'),
+ (0xFB2A, 'M', 'שׁ'),
+ (0xFB2B, 'M', 'שׂ'),
+ (0xFB2C, 'M', 'שּׁ'),
+ (0xFB2D, 'M', 'שּׂ'),
+ (0xFB2E, 'M', 'אַ'),
+ (0xFB2F, 'M', 'אָ'),
+ (0xFB30, 'M', 'אּ'),
+ (0xFB31, 'M', 'בּ'),
+ (0xFB32, 'M', 'גּ'),
+ (0xFB33, 'M', 'דּ'),
+ (0xFB34, 'M', 'הּ'),
+ (0xFB35, 'M', 'וּ'),
+ (0xFB36, 'M', 'זּ'),
+ (0xFB37, 'X'),
+ (0xFB38, 'M', 'טּ'),
+ (0xFB39, 'M', 'יּ'),
+ (0xFB3A, 'M', 'ךּ'),
+ (0xFB3B, 'M', 'כּ'),
+ (0xFB3C, 'M', 'לּ'),
+ (0xFB3D, 'X'),
+ (0xFB3E, 'M', 'מּ'),
+ (0xFB3F, 'X'),
+ (0xFB40, 'M', 'נּ'),
+ (0xFB41, 'M', 'סּ'),
+ (0xFB42, 'X'),
+ (0xFB43, 'M', 'ףּ'),
+ (0xFB44, 'M', 'פּ'),
+ (0xFB45, 'X'),
+ (0xFB46, 'M', 'צּ'),
+ (0xFB47, 'M', 'קּ'),
+ (0xFB48, 'M', 'רּ'),
+ (0xFB49, 'M', 'שּ'),
+ (0xFB4A, 'M', 'תּ'),
+ (0xFB4B, 'M', 'וֹ'),
+ (0xFB4C, 'M', 'בֿ'),
+ (0xFB4D, 'M', 'כֿ'),
+ (0xFB4E, 'M', 'פֿ'),
+ (0xFB4F, 'M', 'אל'),
+ (0xFB50, 'M', 'ٱ'),
+ (0xFB52, 'M', 'ٻ'),
+ (0xFB56, 'M', 'پ'),
+ (0xFB5A, 'M', 'ڀ'),
+ (0xFB5E, 'M', 'ٺ'),
+ (0xFB62, 'M', 'ٿ'),
+ (0xFB66, 'M', 'ٹ'),
+ (0xFB6A, 'M', 'ڤ'),
+ (0xFB6E, 'M', 'ڦ'),
+ (0xFB72, 'M', 'ڄ'),
+ (0xFB76, 'M', 'ڃ'),
+ (0xFB7A, 'M', 'چ'),
+ (0xFB7E, 'M', 'ڇ'),
+ (0xFB82, 'M', 'ڍ'),
+ (0xFB84, 'M', 'ڌ'),
+ (0xFB86, 'M', 'ڎ'),
+ (0xFB88, 'M', 'ڈ'),
+ (0xFB8A, 'M', 'ژ'),
+ (0xFB8C, 'M', 'ڑ'),
+ (0xFB8E, 'M', 'ک'),
+ (0xFB92, 'M', 'گ'),
+ (0xFB96, 'M', 'ڳ'),
+ (0xFB9A, 'M', 'ڱ'),
+ (0xFB9E, 'M', 'ں'),
+ (0xFBA0, 'M', 'ڻ'),
+ (0xFBA4, 'M', 'ۀ'),
+ (0xFBA6, 'M', 'ہ'),
+ (0xFBAA, 'M', 'ھ'),
+ (0xFBAE, 'M', 'ے'),
+ (0xFBB0, 'M', 'ۓ'),
+ (0xFBB2, 'V'),
+ (0xFBC3, 'X'),
+ (0xFBD3, 'M', 'ڭ'),
+ (0xFBD7, 'M', 'ۇ'),
+ (0xFBD9, 'M', 'ۆ'),
+ (0xFBDB, 'M', 'ۈ'),
+ (0xFBDD, 'M', 'ۇٴ'),
+ (0xFBDE, 'M', 'ۋ'),
+ (0xFBE0, 'M', 'ۅ'),
+ (0xFBE2, 'M', 'ۉ'),
+ (0xFBE4, 'M', 'ې'),
+ (0xFBE8, 'M', 'ى'),
+ ]
+
+def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFBEA, 'M', 'ئا'),
+ (0xFBEC, 'M', 'ئە'),
+ (0xFBEE, 'M', 'ئو'),
+ (0xFBF0, 'M', 'ئۇ'),
+ (0xFBF2, 'M', 'ئۆ'),
+ (0xFBF4, 'M', 'ئۈ'),
+ (0xFBF6, 'M', 'ئې'),
+ (0xFBF9, 'M', 'ئى'),
+ (0xFBFC, 'M', 'ی'),
+ (0xFC00, 'M', 'ئج'),
+ (0xFC01, 'M', 'ئح'),
+ (0xFC02, 'M', 'ئم'),
+ (0xFC03, 'M', 'ئى'),
+ (0xFC04, 'M', 'ئي'),
+ (0xFC05, 'M', 'بج'),
+ (0xFC06, 'M', 'بح'),
+ (0xFC07, 'M', 'بخ'),
+ (0xFC08, 'M', 'بم'),
+ (0xFC09, 'M', 'بى'),
+ (0xFC0A, 'M', 'بي'),
+ (0xFC0B, 'M', 'تج'),
+ (0xFC0C, 'M', 'تح'),
+ (0xFC0D, 'M', 'تخ'),
+ (0xFC0E, 'M', 'تم'),
+ (0xFC0F, 'M', 'تى'),
+ (0xFC10, 'M', 'تي'),
+ (0xFC11, 'M', 'ثج'),
+ (0xFC12, 'M', 'ثم'),
+ (0xFC13, 'M', 'ثى'),
+ (0xFC14, 'M', 'ثي'),
+ (0xFC15, 'M', 'جح'),
+ (0xFC16, 'M', 'جم'),
+ (0xFC17, 'M', 'حج'),
+ (0xFC18, 'M', 'حم'),
+ (0xFC19, 'M', 'خج'),
+ (0xFC1A, 'M', 'خح'),
+ (0xFC1B, 'M', 'خم'),
+ (0xFC1C, 'M', 'سج'),
+ (0xFC1D, 'M', 'سح'),
+ (0xFC1E, 'M', 'سخ'),
+ (0xFC1F, 'M', 'سم'),
+ (0xFC20, 'M', 'صح'),
+ (0xFC21, 'M', 'صم'),
+ (0xFC22, 'M', 'ضج'),
+ (0xFC23, 'M', 'ضح'),
+ (0xFC24, 'M', 'ضخ'),
+ (0xFC25, 'M', 'ضم'),
+ (0xFC26, 'M', 'طح'),
+ (0xFC27, 'M', 'طم'),
+ (0xFC28, 'M', 'ظم'),
+ (0xFC29, 'M', 'عج'),
+ (0xFC2A, 'M', 'عم'),
+ (0xFC2B, 'M', 'غج'),
+ (0xFC2C, 'M', 'غم'),
+ (0xFC2D, 'M', 'فج'),
+ (0xFC2E, 'M', 'فح'),
+ (0xFC2F, 'M', 'فخ'),
+ (0xFC30, 'M', 'فم'),
+ (0xFC31, 'M', 'فى'),
+ (0xFC32, 'M', 'في'),
+ (0xFC33, 'M', 'قح'),
+ (0xFC34, 'M', 'قم'),
+ (0xFC35, 'M', 'قى'),
+ (0xFC36, 'M', 'قي'),
+ (0xFC37, 'M', 'كا'),
+ (0xFC38, 'M', 'كج'),
+ (0xFC39, 'M', 'كح'),
+ (0xFC3A, 'M', 'كخ'),
+ (0xFC3B, 'M', 'كل'),
+ (0xFC3C, 'M', 'كم'),
+ (0xFC3D, 'M', 'كى'),
+ (0xFC3E, 'M', 'كي'),
+ (0xFC3F, 'M', 'لج'),
+ (0xFC40, 'M', 'لح'),
+ (0xFC41, 'M', 'لخ'),
+ (0xFC42, 'M', 'لم'),
+ (0xFC43, 'M', 'لى'),
+ (0xFC44, 'M', 'لي'),
+ (0xFC45, 'M', 'مج'),
+ (0xFC46, 'M', 'مح'),
+ (0xFC47, 'M', 'مخ'),
+ (0xFC48, 'M', 'مم'),
+ (0xFC49, 'M', 'مى'),
+ (0xFC4A, 'M', 'مي'),
+ (0xFC4B, 'M', 'نج'),
+ (0xFC4C, 'M', 'نح'),
+ (0xFC4D, 'M', 'نخ'),
+ (0xFC4E, 'M', 'نم'),
+ (0xFC4F, 'M', 'نى'),
+ (0xFC50, 'M', 'ني'),
+ (0xFC51, 'M', 'هج'),
+ (0xFC52, 'M', 'هم'),
+ (0xFC53, 'M', 'هى'),
+ (0xFC54, 'M', 'هي'),
+ (0xFC55, 'M', 'يج'),
+ (0xFC56, 'M', 'يح'),
+ (0xFC57, 'M', 'يخ'),
+ (0xFC58, 'M', 'يم'),
+ (0xFC59, 'M', 'يى'),
+ (0xFC5A, 'M', 'يي'),
+ ]
+
+def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFC5B, 'M', 'ذٰ'),
+ (0xFC5C, 'M', 'رٰ'),
+ (0xFC5D, 'M', 'ىٰ'),
+ (0xFC5E, '3', ' ٌّ'),
+ (0xFC5F, '3', ' ٍّ'),
+ (0xFC60, '3', ' َّ'),
+ (0xFC61, '3', ' ُّ'),
+ (0xFC62, '3', ' ِّ'),
+ (0xFC63, '3', ' ّٰ'),
+ (0xFC64, 'M', 'ئر'),
+ (0xFC65, 'M', 'ئز'),
+ (0xFC66, 'M', 'ئم'),
+ (0xFC67, 'M', 'ئن'),
+ (0xFC68, 'M', 'ئى'),
+ (0xFC69, 'M', 'ئي'),
+ (0xFC6A, 'M', 'بر'),
+ (0xFC6B, 'M', 'بز'),
+ (0xFC6C, 'M', 'بم'),
+ (0xFC6D, 'M', 'بن'),
+ (0xFC6E, 'M', 'بى'),
+ (0xFC6F, 'M', 'بي'),
+ (0xFC70, 'M', 'تر'),
+ (0xFC71, 'M', 'تز'),
+ (0xFC72, 'M', 'تم'),
+ (0xFC73, 'M', 'تن'),
+ (0xFC74, 'M', 'تى'),
+ (0xFC75, 'M', 'تي'),
+ (0xFC76, 'M', 'ثر'),
+ (0xFC77, 'M', 'ثز'),
+ (0xFC78, 'M', 'ثم'),
+ (0xFC79, 'M', 'ثن'),
+ (0xFC7A, 'M', 'ثى'),
+ (0xFC7B, 'M', 'ثي'),
+ (0xFC7C, 'M', 'فى'),
+ (0xFC7D, 'M', 'في'),
+ (0xFC7E, 'M', 'قى'),
+ (0xFC7F, 'M', 'قي'),
+ (0xFC80, 'M', 'كا'),
+ (0xFC81, 'M', 'كل'),
+ (0xFC82, 'M', 'كم'),
+ (0xFC83, 'M', 'كى'),
+ (0xFC84, 'M', 'كي'),
+ (0xFC85, 'M', 'لم'),
+ (0xFC86, 'M', 'لى'),
+ (0xFC87, 'M', 'لي'),
+ (0xFC88, 'M', 'ما'),
+ (0xFC89, 'M', 'مم'),
+ (0xFC8A, 'M', 'نر'),
+ (0xFC8B, 'M', 'نز'),
+ (0xFC8C, 'M', 'نم'),
+ (0xFC8D, 'M', 'نن'),
+ (0xFC8E, 'M', 'نى'),
+ (0xFC8F, 'M', 'ني'),
+ (0xFC90, 'M', 'ىٰ'),
+ (0xFC91, 'M', 'ير'),
+ (0xFC92, 'M', 'يز'),
+ (0xFC93, 'M', 'يم'),
+ (0xFC94, 'M', 'ين'),
+ (0xFC95, 'M', 'يى'),
+ (0xFC96, 'M', 'يي'),
+ (0xFC97, 'M', 'ئج'),
+ (0xFC98, 'M', 'ئح'),
+ (0xFC99, 'M', 'ئخ'),
+ (0xFC9A, 'M', 'ئم'),
+ (0xFC9B, 'M', 'ئه'),
+ (0xFC9C, 'M', 'بج'),
+ (0xFC9D, 'M', 'بح'),
+ (0xFC9E, 'M', 'بخ'),
+ (0xFC9F, 'M', 'بم'),
+ (0xFCA0, 'M', 'به'),
+ (0xFCA1, 'M', 'تج'),
+ (0xFCA2, 'M', 'تح'),
+ (0xFCA3, 'M', 'تخ'),
+ (0xFCA4, 'M', 'تم'),
+ (0xFCA5, 'M', 'ته'),
+ (0xFCA6, 'M', 'ثم'),
+ (0xFCA7, 'M', 'جح'),
+ (0xFCA8, 'M', 'جم'),
+ (0xFCA9, 'M', 'حج'),
+ (0xFCAA, 'M', 'حم'),
+ (0xFCAB, 'M', 'خج'),
+ (0xFCAC, 'M', 'خم'),
+ (0xFCAD, 'M', 'سج'),
+ (0xFCAE, 'M', 'سح'),
+ (0xFCAF, 'M', 'سخ'),
+ (0xFCB0, 'M', 'سم'),
+ (0xFCB1, 'M', 'صح'),
+ (0xFCB2, 'M', 'صخ'),
+ (0xFCB3, 'M', 'صم'),
+ (0xFCB4, 'M', 'ضج'),
+ (0xFCB5, 'M', 'ضح'),
+ (0xFCB6, 'M', 'ضخ'),
+ (0xFCB7, 'M', 'ضم'),
+ (0xFCB8, 'M', 'طح'),
+ (0xFCB9, 'M', 'ظم'),
+ (0xFCBA, 'M', 'عج'),
+ (0xFCBB, 'M', 'عم'),
+ (0xFCBC, 'M', 'غج'),
+ (0xFCBD, 'M', 'غم'),
+ (0xFCBE, 'M', 'فج'),
+ ]
+
+def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFCBF, 'M', 'فح'),
+ (0xFCC0, 'M', 'فخ'),
+ (0xFCC1, 'M', 'فم'),
+ (0xFCC2, 'M', 'قح'),
+ (0xFCC3, 'M', 'قم'),
+ (0xFCC4, 'M', 'كج'),
+ (0xFCC5, 'M', 'كح'),
+ (0xFCC6, 'M', 'كخ'),
+ (0xFCC7, 'M', 'كل'),
+ (0xFCC8, 'M', 'كم'),
+ (0xFCC9, 'M', 'لج'),
+ (0xFCCA, 'M', 'لح'),
+ (0xFCCB, 'M', 'لخ'),
+ (0xFCCC, 'M', 'لم'),
+ (0xFCCD, 'M', 'له'),
+ (0xFCCE, 'M', 'مج'),
+ (0xFCCF, 'M', 'مح'),
+ (0xFCD0, 'M', 'مخ'),
+ (0xFCD1, 'M', 'مم'),
+ (0xFCD2, 'M', 'نج'),
+ (0xFCD3, 'M', 'نح'),
+ (0xFCD4, 'M', 'نخ'),
+ (0xFCD5, 'M', 'نم'),
+ (0xFCD6, 'M', 'نه'),
+ (0xFCD7, 'M', 'هج'),
+ (0xFCD8, 'M', 'هم'),
+ (0xFCD9, 'M', 'هٰ'),
+ (0xFCDA, 'M', 'يج'),
+ (0xFCDB, 'M', 'يح'),
+ (0xFCDC, 'M', 'يخ'),
+ (0xFCDD, 'M', 'يم'),
+ (0xFCDE, 'M', 'يه'),
+ (0xFCDF, 'M', 'ئم'),
+ (0xFCE0, 'M', 'ئه'),
+ (0xFCE1, 'M', 'بم'),
+ (0xFCE2, 'M', 'به'),
+ (0xFCE3, 'M', 'تم'),
+ (0xFCE4, 'M', 'ته'),
+ (0xFCE5, 'M', 'ثم'),
+ (0xFCE6, 'M', 'ثه'),
+ (0xFCE7, 'M', 'سم'),
+ (0xFCE8, 'M', 'سه'),
+ (0xFCE9, 'M', 'شم'),
+ (0xFCEA, 'M', 'شه'),
+ (0xFCEB, 'M', 'كل'),
+ (0xFCEC, 'M', 'كم'),
+ (0xFCED, 'M', 'لم'),
+ (0xFCEE, 'M', 'نم'),
+ (0xFCEF, 'M', 'نه'),
+ (0xFCF0, 'M', 'يم'),
+ (0xFCF1, 'M', 'يه'),
+ (0xFCF2, 'M', 'ـَّ'),
+ (0xFCF3, 'M', 'ـُّ'),
+ (0xFCF4, 'M', 'ـِّ'),
+ (0xFCF5, 'M', 'طى'),
+ (0xFCF6, 'M', 'طي'),
+ (0xFCF7, 'M', 'عى'),
+ (0xFCF8, 'M', 'عي'),
+ (0xFCF9, 'M', 'غى'),
+ (0xFCFA, 'M', 'غي'),
+ (0xFCFB, 'M', 'سى'),
+ (0xFCFC, 'M', 'سي'),
+ (0xFCFD, 'M', 'شى'),
+ (0xFCFE, 'M', 'شي'),
+ (0xFCFF, 'M', 'حى'),
+ (0xFD00, 'M', 'حي'),
+ (0xFD01, 'M', 'جى'),
+ (0xFD02, 'M', 'جي'),
+ (0xFD03, 'M', 'خى'),
+ (0xFD04, 'M', 'خي'),
+ (0xFD05, 'M', 'صى'),
+ (0xFD06, 'M', 'صي'),
+ (0xFD07, 'M', 'ضى'),
+ (0xFD08, 'M', 'ضي'),
+ (0xFD09, 'M', 'شج'),
+ (0xFD0A, 'M', 'شح'),
+ (0xFD0B, 'M', 'شخ'),
+ (0xFD0C, 'M', 'شم'),
+ (0xFD0D, 'M', 'شر'),
+ (0xFD0E, 'M', 'سر'),
+ (0xFD0F, 'M', 'صر'),
+ (0xFD10, 'M', 'ضر'),
+ (0xFD11, 'M', 'طى'),
+ (0xFD12, 'M', 'طي'),
+ (0xFD13, 'M', 'عى'),
+ (0xFD14, 'M', 'عي'),
+ (0xFD15, 'M', 'غى'),
+ (0xFD16, 'M', 'غي'),
+ (0xFD17, 'M', 'سى'),
+ (0xFD18, 'M', 'سي'),
+ (0xFD19, 'M', 'شى'),
+ (0xFD1A, 'M', 'شي'),
+ (0xFD1B, 'M', 'حى'),
+ (0xFD1C, 'M', 'حي'),
+ (0xFD1D, 'M', 'جى'),
+ (0xFD1E, 'M', 'جي'),
+ (0xFD1F, 'M', 'خى'),
+ (0xFD20, 'M', 'خي'),
+ (0xFD21, 'M', 'صى'),
+ (0xFD22, 'M', 'صي'),
+ ]
+
+def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFD23, 'M', 'ضى'),
+ (0xFD24, 'M', 'ضي'),
+ (0xFD25, 'M', 'شج'),
+ (0xFD26, 'M', 'شح'),
+ (0xFD27, 'M', 'شخ'),
+ (0xFD28, 'M', 'شم'),
+ (0xFD29, 'M', 'شر'),
+ (0xFD2A, 'M', 'سر'),
+ (0xFD2B, 'M', 'صر'),
+ (0xFD2C, 'M', 'ضر'),
+ (0xFD2D, 'M', 'شج'),
+ (0xFD2E, 'M', 'شح'),
+ (0xFD2F, 'M', 'شخ'),
+ (0xFD30, 'M', 'شم'),
+ (0xFD31, 'M', 'سه'),
+ (0xFD32, 'M', 'شه'),
+ (0xFD33, 'M', 'طم'),
+ (0xFD34, 'M', 'سج'),
+ (0xFD35, 'M', 'سح'),
+ (0xFD36, 'M', 'سخ'),
+ (0xFD37, 'M', 'شج'),
+ (0xFD38, 'M', 'شح'),
+ (0xFD39, 'M', 'شخ'),
+ (0xFD3A, 'M', 'طم'),
+ (0xFD3B, 'M', 'ظم'),
+ (0xFD3C, 'M', 'اً'),
+ (0xFD3E, 'V'),
+ (0xFD50, 'M', 'تجم'),
+ (0xFD51, 'M', 'تحج'),
+ (0xFD53, 'M', 'تحم'),
+ (0xFD54, 'M', 'تخم'),
+ (0xFD55, 'M', 'تمج'),
+ (0xFD56, 'M', 'تمح'),
+ (0xFD57, 'M', 'تمخ'),
+ (0xFD58, 'M', 'جمح'),
+ (0xFD5A, 'M', 'حمي'),
+ (0xFD5B, 'M', 'حمى'),
+ (0xFD5C, 'M', 'سحج'),
+ (0xFD5D, 'M', 'سجح'),
+ (0xFD5E, 'M', 'سجى'),
+ (0xFD5F, 'M', 'سمح'),
+ (0xFD61, 'M', 'سمج'),
+ (0xFD62, 'M', 'سمم'),
+ (0xFD64, 'M', 'صحح'),
+ (0xFD66, 'M', 'صمم'),
+ (0xFD67, 'M', 'شحم'),
+ (0xFD69, 'M', 'شجي'),
+ (0xFD6A, 'M', 'شمخ'),
+ (0xFD6C, 'M', 'شمم'),
+ (0xFD6E, 'M', 'ضحى'),
+ (0xFD6F, 'M', 'ضخم'),
+ (0xFD71, 'M', 'طمح'),
+ (0xFD73, 'M', 'طمم'),
+ (0xFD74, 'M', 'طمي'),
+ (0xFD75, 'M', 'عجم'),
+ (0xFD76, 'M', 'عمم'),
+ (0xFD78, 'M', 'عمى'),
+ (0xFD79, 'M', 'غمم'),
+ (0xFD7A, 'M', 'غمي'),
+ (0xFD7B, 'M', 'غمى'),
+ (0xFD7C, 'M', 'فخم'),
+ (0xFD7E, 'M', 'قمح'),
+ (0xFD7F, 'M', 'قمم'),
+ (0xFD80, 'M', 'لحم'),
+ (0xFD81, 'M', 'لحي'),
+ (0xFD82, 'M', 'لحى'),
+ (0xFD83, 'M', 'لجج'),
+ (0xFD85, 'M', 'لخم'),
+ (0xFD87, 'M', 'لمح'),
+ (0xFD89, 'M', 'محج'),
+ (0xFD8A, 'M', 'محم'),
+ (0xFD8B, 'M', 'محي'),
+ (0xFD8C, 'M', 'مجح'),
+ (0xFD8D, 'M', 'مجم'),
+ (0xFD8E, 'M', 'مخج'),
+ (0xFD8F, 'M', 'مخم'),
+ (0xFD90, 'X'),
+ (0xFD92, 'M', 'مجخ'),
+ (0xFD93, 'M', 'همج'),
+ (0xFD94, 'M', 'همم'),
+ (0xFD95, 'M', 'نحم'),
+ (0xFD96, 'M', 'نحى'),
+ (0xFD97, 'M', 'نجم'),
+ (0xFD99, 'M', 'نجى'),
+ (0xFD9A, 'M', 'نمي'),
+ (0xFD9B, 'M', 'نمى'),
+ (0xFD9C, 'M', 'يمم'),
+ (0xFD9E, 'M', 'بخي'),
+ (0xFD9F, 'M', 'تجي'),
+ (0xFDA0, 'M', 'تجى'),
+ (0xFDA1, 'M', 'تخي'),
+ (0xFDA2, 'M', 'تخى'),
+ (0xFDA3, 'M', 'تمي'),
+ (0xFDA4, 'M', 'تمى'),
+ (0xFDA5, 'M', 'جمي'),
+ (0xFDA6, 'M', 'جحى'),
+ (0xFDA7, 'M', 'جمى'),
+ (0xFDA8, 'M', 'سخى'),
+ (0xFDA9, 'M', 'صحي'),
+ (0xFDAA, 'M', 'شحي'),
+ ]
+
+def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFDAB, 'M', 'ضحي'),
+ (0xFDAC, 'M', 'لجي'),
+ (0xFDAD, 'M', 'لمي'),
+ (0xFDAE, 'M', 'يحي'),
+ (0xFDAF, 'M', 'يجي'),
+ (0xFDB0, 'M', 'يمي'),
+ (0xFDB1, 'M', 'ممي'),
+ (0xFDB2, 'M', 'قمي'),
+ (0xFDB3, 'M', 'نحي'),
+ (0xFDB4, 'M', 'قمح'),
+ (0xFDB5, 'M', 'لحم'),
+ (0xFDB6, 'M', 'عمي'),
+ (0xFDB7, 'M', 'كمي'),
+ (0xFDB8, 'M', 'نجح'),
+ (0xFDB9, 'M', 'مخي'),
+ (0xFDBA, 'M', 'لجم'),
+ (0xFDBB, 'M', 'كمم'),
+ (0xFDBC, 'M', 'لجم'),
+ (0xFDBD, 'M', 'نجح'),
+ (0xFDBE, 'M', 'جحي'),
+ (0xFDBF, 'M', 'حجي'),
+ (0xFDC0, 'M', 'مجي'),
+ (0xFDC1, 'M', 'فمي'),
+ (0xFDC2, 'M', 'بحي'),
+ (0xFDC3, 'M', 'كمم'),
+ (0xFDC4, 'M', 'عجم'),
+ (0xFDC5, 'M', 'صمم'),
+ (0xFDC6, 'M', 'سخي'),
+ (0xFDC7, 'M', 'نجي'),
+ (0xFDC8, 'X'),
+ (0xFDCF, 'V'),
+ (0xFDD0, 'X'),
+ (0xFDF0, 'M', 'صلے'),
+ (0xFDF1, 'M', 'قلے'),
+ (0xFDF2, 'M', 'الله'),
+ (0xFDF3, 'M', 'اكبر'),
+ (0xFDF4, 'M', 'محمد'),
+ (0xFDF5, 'M', 'صلعم'),
+ (0xFDF6, 'M', 'رسول'),
+ (0xFDF7, 'M', 'عليه'),
+ (0xFDF8, 'M', 'وسلم'),
+ (0xFDF9, 'M', 'صلى'),
+ (0xFDFA, '3', 'صلى الله عليه وسلم'),
+ (0xFDFB, '3', 'جل جلاله'),
+ (0xFDFC, 'M', 'ریال'),
+ (0xFDFD, 'V'),
+ (0xFE00, 'I'),
+ (0xFE10, '3', ','),
+ (0xFE11, 'M', '、'),
+ (0xFE12, 'X'),
+ (0xFE13, '3', ':'),
+ (0xFE14, '3', ';'),
+ (0xFE15, '3', '!'),
+ (0xFE16, '3', '?'),
+ (0xFE17, 'M', '〖'),
+ (0xFE18, 'M', '〗'),
+ (0xFE19, 'X'),
+ (0xFE20, 'V'),
+ (0xFE30, 'X'),
+ (0xFE31, 'M', '—'),
+ (0xFE32, 'M', '–'),
+ (0xFE33, '3', '_'),
+ (0xFE35, '3', '('),
+ (0xFE36, '3', ')'),
+ (0xFE37, '3', '{'),
+ (0xFE38, '3', '}'),
+ (0xFE39, 'M', '〔'),
+ (0xFE3A, 'M', '〕'),
+ (0xFE3B, 'M', '【'),
+ (0xFE3C, 'M', '】'),
+ (0xFE3D, 'M', '《'),
+ (0xFE3E, 'M', '》'),
+ (0xFE3F, 'M', '〈'),
+ (0xFE40, 'M', '〉'),
+ (0xFE41, 'M', '「'),
+ (0xFE42, 'M', '」'),
+ (0xFE43, 'M', '『'),
+ (0xFE44, 'M', '』'),
+ (0xFE45, 'V'),
+ (0xFE47, '3', '['),
+ (0xFE48, '3', ']'),
+ (0xFE49, '3', ' ̅'),
+ (0xFE4D, '3', '_'),
+ (0xFE50, '3', ','),
+ (0xFE51, 'M', '、'),
+ (0xFE52, 'X'),
+ (0xFE54, '3', ';'),
+ (0xFE55, '3', ':'),
+ (0xFE56, '3', '?'),
+ (0xFE57, '3', '!'),
+ (0xFE58, 'M', '—'),
+ (0xFE59, '3', '('),
+ (0xFE5A, '3', ')'),
+ (0xFE5B, '3', '{'),
+ (0xFE5C, '3', '}'),
+ (0xFE5D, 'M', '〔'),
+ (0xFE5E, 'M', '〕'),
+ (0xFE5F, '3', '#'),
+ (0xFE60, '3', '&'),
+ (0xFE61, '3', '*'),
+ ]
+
+def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFE62, '3', '+'),
+ (0xFE63, 'M', '-'),
+ (0xFE64, '3', '<'),
+ (0xFE65, '3', '>'),
+ (0xFE66, '3', '='),
+ (0xFE67, 'X'),
+ (0xFE68, '3', '\\'),
+ (0xFE69, '3', '$'),
+ (0xFE6A, '3', '%'),
+ (0xFE6B, '3', '@'),
+ (0xFE6C, 'X'),
+ (0xFE70, '3', ' ً'),
+ (0xFE71, 'M', 'ـً'),
+ (0xFE72, '3', ' ٌ'),
+ (0xFE73, 'V'),
+ (0xFE74, '3', ' ٍ'),
+ (0xFE75, 'X'),
+ (0xFE76, '3', ' َ'),
+ (0xFE77, 'M', 'ـَ'),
+ (0xFE78, '3', ' ُ'),
+ (0xFE79, 'M', 'ـُ'),
+ (0xFE7A, '3', ' ِ'),
+ (0xFE7B, 'M', 'ـِ'),
+ (0xFE7C, '3', ' ّ'),
+ (0xFE7D, 'M', 'ـّ'),
+ (0xFE7E, '3', ' ْ'),
+ (0xFE7F, 'M', 'ـْ'),
+ (0xFE80, 'M', 'ء'),
+ (0xFE81, 'M', 'آ'),
+ (0xFE83, 'M', 'أ'),
+ (0xFE85, 'M', 'ؤ'),
+ (0xFE87, 'M', 'إ'),
+ (0xFE89, 'M', 'ئ'),
+ (0xFE8D, 'M', 'ا'),
+ (0xFE8F, 'M', 'ب'),
+ (0xFE93, 'M', 'ة'),
+ (0xFE95, 'M', 'ت'),
+ (0xFE99, 'M', 'ث'),
+ (0xFE9D, 'M', 'ج'),
+ (0xFEA1, 'M', 'ح'),
+ (0xFEA5, 'M', 'خ'),
+ (0xFEA9, 'M', 'د'),
+ (0xFEAB, 'M', 'ذ'),
+ (0xFEAD, 'M', 'ر'),
+ (0xFEAF, 'M', 'ز'),
+ (0xFEB1, 'M', 'س'),
+ (0xFEB5, 'M', 'ش'),
+ (0xFEB9, 'M', 'ص'),
+ (0xFEBD, 'M', 'ض'),
+ (0xFEC1, 'M', 'ط'),
+ (0xFEC5, 'M', 'ظ'),
+ (0xFEC9, 'M', 'ع'),
+ (0xFECD, 'M', 'غ'),
+ (0xFED1, 'M', 'ف'),
+ (0xFED5, 'M', 'ق'),
+ (0xFED9, 'M', 'ك'),
+ (0xFEDD, 'M', 'ل'),
+ (0xFEE1, 'M', 'م'),
+ (0xFEE5, 'M', 'ن'),
+ (0xFEE9, 'M', 'ه'),
+ (0xFEED, 'M', 'و'),
+ (0xFEEF, 'M', 'ى'),
+ (0xFEF1, 'M', 'ي'),
+ (0xFEF5, 'M', 'لآ'),
+ (0xFEF7, 'M', 'لأ'),
+ (0xFEF9, 'M', 'لإ'),
+ (0xFEFB, 'M', 'لا'),
+ (0xFEFD, 'X'),
+ (0xFEFF, 'I'),
+ (0xFF00, 'X'),
+ (0xFF01, '3', '!'),
+ (0xFF02, '3', '"'),
+ (0xFF03, '3', '#'),
+ (0xFF04, '3', '$'),
+ (0xFF05, '3', '%'),
+ (0xFF06, '3', '&'),
+ (0xFF07, '3', '\''),
+ (0xFF08, '3', '('),
+ (0xFF09, '3', ')'),
+ (0xFF0A, '3', '*'),
+ (0xFF0B, '3', '+'),
+ (0xFF0C, '3', ','),
+ (0xFF0D, 'M', '-'),
+ (0xFF0E, 'M', '.'),
+ (0xFF0F, '3', '/'),
+ (0xFF10, 'M', '0'),
+ (0xFF11, 'M', '1'),
+ (0xFF12, 'M', '2'),
+ (0xFF13, 'M', '3'),
+ (0xFF14, 'M', '4'),
+ (0xFF15, 'M', '5'),
+ (0xFF16, 'M', '6'),
+ (0xFF17, 'M', '7'),
+ (0xFF18, 'M', '8'),
+ (0xFF19, 'M', '9'),
+ (0xFF1A, '3', ':'),
+ (0xFF1B, '3', ';'),
+ (0xFF1C, '3', '<'),
+ (0xFF1D, '3', '='),
+ (0xFF1E, '3', '>'),
+ ]
+
+def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFF1F, '3', '?'),
+ (0xFF20, '3', '@'),
+ (0xFF21, 'M', 'a'),
+ (0xFF22, 'M', 'b'),
+ (0xFF23, 'M', 'c'),
+ (0xFF24, 'M', 'd'),
+ (0xFF25, 'M', 'e'),
+ (0xFF26, 'M', 'f'),
+ (0xFF27, 'M', 'g'),
+ (0xFF28, 'M', 'h'),
+ (0xFF29, 'M', 'i'),
+ (0xFF2A, 'M', 'j'),
+ (0xFF2B, 'M', 'k'),
+ (0xFF2C, 'M', 'l'),
+ (0xFF2D, 'M', 'm'),
+ (0xFF2E, 'M', 'n'),
+ (0xFF2F, 'M', 'o'),
+ (0xFF30, 'M', 'p'),
+ (0xFF31, 'M', 'q'),
+ (0xFF32, 'M', 'r'),
+ (0xFF33, 'M', 's'),
+ (0xFF34, 'M', 't'),
+ (0xFF35, 'M', 'u'),
+ (0xFF36, 'M', 'v'),
+ (0xFF37, 'M', 'w'),
+ (0xFF38, 'M', 'x'),
+ (0xFF39, 'M', 'y'),
+ (0xFF3A, 'M', 'z'),
+ (0xFF3B, '3', '['),
+ (0xFF3C, '3', '\\'),
+ (0xFF3D, '3', ']'),
+ (0xFF3E, '3', '^'),
+ (0xFF3F, '3', '_'),
+ (0xFF40, '3', '`'),
+ (0xFF41, 'M', 'a'),
+ (0xFF42, 'M', 'b'),
+ (0xFF43, 'M', 'c'),
+ (0xFF44, 'M', 'd'),
+ (0xFF45, 'M', 'e'),
+ (0xFF46, 'M', 'f'),
+ (0xFF47, 'M', 'g'),
+ (0xFF48, 'M', 'h'),
+ (0xFF49, 'M', 'i'),
+ (0xFF4A, 'M', 'j'),
+ (0xFF4B, 'M', 'k'),
+ (0xFF4C, 'M', 'l'),
+ (0xFF4D, 'M', 'm'),
+ (0xFF4E, 'M', 'n'),
+ (0xFF4F, 'M', 'o'),
+ (0xFF50, 'M', 'p'),
+ (0xFF51, 'M', 'q'),
+ (0xFF52, 'M', 'r'),
+ (0xFF53, 'M', 's'),
+ (0xFF54, 'M', 't'),
+ (0xFF55, 'M', 'u'),
+ (0xFF56, 'M', 'v'),
+ (0xFF57, 'M', 'w'),
+ (0xFF58, 'M', 'x'),
+ (0xFF59, 'M', 'y'),
+ (0xFF5A, 'M', 'z'),
+ (0xFF5B, '3', '{'),
+ (0xFF5C, '3', '|'),
+ (0xFF5D, '3', '}'),
+ (0xFF5E, '3', '~'),
+ (0xFF5F, 'M', '⦅'),
+ (0xFF60, 'M', '⦆'),
+ (0xFF61, 'M', '.'),
+ (0xFF62, 'M', '「'),
+ (0xFF63, 'M', '」'),
+ (0xFF64, 'M', '、'),
+ (0xFF65, 'M', '・'),
+ (0xFF66, 'M', 'ヲ'),
+ (0xFF67, 'M', 'ァ'),
+ (0xFF68, 'M', 'ィ'),
+ (0xFF69, 'M', 'ゥ'),
+ (0xFF6A, 'M', 'ェ'),
+ (0xFF6B, 'M', 'ォ'),
+ (0xFF6C, 'M', 'ャ'),
+ (0xFF6D, 'M', 'ュ'),
+ (0xFF6E, 'M', 'ョ'),
+ (0xFF6F, 'M', 'ッ'),
+ (0xFF70, 'M', 'ー'),
+ (0xFF71, 'M', 'ア'),
+ (0xFF72, 'M', 'イ'),
+ (0xFF73, 'M', 'ウ'),
+ (0xFF74, 'M', 'エ'),
+ (0xFF75, 'M', 'オ'),
+ (0xFF76, 'M', 'カ'),
+ (0xFF77, 'M', 'キ'),
+ (0xFF78, 'M', 'ク'),
+ (0xFF79, 'M', 'ケ'),
+ (0xFF7A, 'M', 'コ'),
+ (0xFF7B, 'M', 'サ'),
+ (0xFF7C, 'M', 'シ'),
+ (0xFF7D, 'M', 'ス'),
+ (0xFF7E, 'M', 'セ'),
+ (0xFF7F, 'M', 'ソ'),
+ (0xFF80, 'M', 'タ'),
+ (0xFF81, 'M', 'チ'),
+ (0xFF82, 'M', 'ツ'),
+ ]
+
+def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFF83, 'M', 'テ'),
+ (0xFF84, 'M', 'ト'),
+ (0xFF85, 'M', 'ナ'),
+ (0xFF86, 'M', 'ニ'),
+ (0xFF87, 'M', 'ヌ'),
+ (0xFF88, 'M', 'ネ'),
+ (0xFF89, 'M', 'ノ'),
+ (0xFF8A, 'M', 'ハ'),
+ (0xFF8B, 'M', 'ヒ'),
+ (0xFF8C, 'M', 'フ'),
+ (0xFF8D, 'M', 'ヘ'),
+ (0xFF8E, 'M', 'ホ'),
+ (0xFF8F, 'M', 'マ'),
+ (0xFF90, 'M', 'ミ'),
+ (0xFF91, 'M', 'ム'),
+ (0xFF92, 'M', 'メ'),
+ (0xFF93, 'M', 'モ'),
+ (0xFF94, 'M', 'ヤ'),
+ (0xFF95, 'M', 'ユ'),
+ (0xFF96, 'M', 'ヨ'),
+ (0xFF97, 'M', 'ラ'),
+ (0xFF98, 'M', 'リ'),
+ (0xFF99, 'M', 'ル'),
+ (0xFF9A, 'M', 'レ'),
+ (0xFF9B, 'M', 'ロ'),
+ (0xFF9C, 'M', 'ワ'),
+ (0xFF9D, 'M', 'ン'),
+ (0xFF9E, 'M', '゙'),
+ (0xFF9F, 'M', '゚'),
+ (0xFFA0, 'X'),
+ (0xFFA1, 'M', 'ᄀ'),
+ (0xFFA2, 'M', 'ᄁ'),
+ (0xFFA3, 'M', 'ᆪ'),
+ (0xFFA4, 'M', 'ᄂ'),
+ (0xFFA5, 'M', 'ᆬ'),
+ (0xFFA6, 'M', 'ᆭ'),
+ (0xFFA7, 'M', 'ᄃ'),
+ (0xFFA8, 'M', 'ᄄ'),
+ (0xFFA9, 'M', 'ᄅ'),
+ (0xFFAA, 'M', 'ᆰ'),
+ (0xFFAB, 'M', 'ᆱ'),
+ (0xFFAC, 'M', 'ᆲ'),
+ (0xFFAD, 'M', 'ᆳ'),
+ (0xFFAE, 'M', 'ᆴ'),
+ (0xFFAF, 'M', 'ᆵ'),
+ (0xFFB0, 'M', 'ᄚ'),
+ (0xFFB1, 'M', 'ᄆ'),
+ (0xFFB2, 'M', 'ᄇ'),
+ (0xFFB3, 'M', 'ᄈ'),
+ (0xFFB4, 'M', 'ᄡ'),
+ (0xFFB5, 'M', 'ᄉ'),
+ (0xFFB6, 'M', 'ᄊ'),
+ (0xFFB7, 'M', 'ᄋ'),
+ (0xFFB8, 'M', 'ᄌ'),
+ (0xFFB9, 'M', 'ᄍ'),
+ (0xFFBA, 'M', 'ᄎ'),
+ (0xFFBB, 'M', 'ᄏ'),
+ (0xFFBC, 'M', 'ᄐ'),
+ (0xFFBD, 'M', 'ᄑ'),
+ (0xFFBE, 'M', 'ᄒ'),
+ (0xFFBF, 'X'),
+ (0xFFC2, 'M', 'ᅡ'),
+ (0xFFC3, 'M', 'ᅢ'),
+ (0xFFC4, 'M', 'ᅣ'),
+ (0xFFC5, 'M', 'ᅤ'),
+ (0xFFC6, 'M', 'ᅥ'),
+ (0xFFC7, 'M', 'ᅦ'),
+ (0xFFC8, 'X'),
+ (0xFFCA, 'M', 'ᅧ'),
+ (0xFFCB, 'M', 'ᅨ'),
+ (0xFFCC, 'M', 'ᅩ'),
+ (0xFFCD, 'M', 'ᅪ'),
+ (0xFFCE, 'M', 'ᅫ'),
+ (0xFFCF, 'M', 'ᅬ'),
+ (0xFFD0, 'X'),
+ (0xFFD2, 'M', 'ᅭ'),
+ (0xFFD3, 'M', 'ᅮ'),
+ (0xFFD4, 'M', 'ᅯ'),
+ (0xFFD5, 'M', 'ᅰ'),
+ (0xFFD6, 'M', 'ᅱ'),
+ (0xFFD7, 'M', 'ᅲ'),
+ (0xFFD8, 'X'),
+ (0xFFDA, 'M', 'ᅳ'),
+ (0xFFDB, 'M', 'ᅴ'),
+ (0xFFDC, 'M', 'ᅵ'),
+ (0xFFDD, 'X'),
+ (0xFFE0, 'M', '¢'),
+ (0xFFE1, 'M', '£'),
+ (0xFFE2, 'M', '¬'),
+ (0xFFE3, '3', ' ̄'),
+ (0xFFE4, 'M', '¦'),
+ (0xFFE5, 'M', '¥'),
+ (0xFFE6, 'M', '₩'),
+ (0xFFE7, 'X'),
+ (0xFFE8, 'M', '│'),
+ (0xFFE9, 'M', '←'),
+ (0xFFEA, 'M', '↑'),
+ (0xFFEB, 'M', '→'),
+ (0xFFEC, 'M', '↓'),
+ (0xFFED, 'M', '■'),
+ ]
+
+def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFFEE, 'M', '○'),
+ (0xFFEF, 'X'),
+ (0x10000, 'V'),
+ (0x1000C, 'X'),
+ (0x1000D, 'V'),
+ (0x10027, 'X'),
+ (0x10028, 'V'),
+ (0x1003B, 'X'),
+ (0x1003C, 'V'),
+ (0x1003E, 'X'),
+ (0x1003F, 'V'),
+ (0x1004E, 'X'),
+ (0x10050, 'V'),
+ (0x1005E, 'X'),
+ (0x10080, 'V'),
+ (0x100FB, 'X'),
+ (0x10100, 'V'),
+ (0x10103, 'X'),
+ (0x10107, 'V'),
+ (0x10134, 'X'),
+ (0x10137, 'V'),
+ (0x1018F, 'X'),
+ (0x10190, 'V'),
+ (0x1019D, 'X'),
+ (0x101A0, 'V'),
+ (0x101A1, 'X'),
+ (0x101D0, 'V'),
+ (0x101FE, 'X'),
+ (0x10280, 'V'),
+ (0x1029D, 'X'),
+ (0x102A0, 'V'),
+ (0x102D1, 'X'),
+ (0x102E0, 'V'),
+ (0x102FC, 'X'),
+ (0x10300, 'V'),
+ (0x10324, 'X'),
+ (0x1032D, 'V'),
+ (0x1034B, 'X'),
+ (0x10350, 'V'),
+ (0x1037B, 'X'),
+ (0x10380, 'V'),
+ (0x1039E, 'X'),
+ (0x1039F, 'V'),
+ (0x103C4, 'X'),
+ (0x103C8, 'V'),
+ (0x103D6, 'X'),
+ (0x10400, 'M', '𐐨'),
+ (0x10401, 'M', '𐐩'),
+ (0x10402, 'M', '𐐪'),
+ (0x10403, 'M', '𐐫'),
+ (0x10404, 'M', '𐐬'),
+ (0x10405, 'M', '𐐭'),
+ (0x10406, 'M', '𐐮'),
+ (0x10407, 'M', '𐐯'),
+ (0x10408, 'M', '𐐰'),
+ (0x10409, 'M', '𐐱'),
+ (0x1040A, 'M', '𐐲'),
+ (0x1040B, 'M', '𐐳'),
+ (0x1040C, 'M', '𐐴'),
+ (0x1040D, 'M', '𐐵'),
+ (0x1040E, 'M', '𐐶'),
+ (0x1040F, 'M', '𐐷'),
+ (0x10410, 'M', '𐐸'),
+ (0x10411, 'M', '𐐹'),
+ (0x10412, 'M', '𐐺'),
+ (0x10413, 'M', '𐐻'),
+ (0x10414, 'M', '𐐼'),
+ (0x10415, 'M', '𐐽'),
+ (0x10416, 'M', '𐐾'),
+ (0x10417, 'M', '𐐿'),
+ (0x10418, 'M', '𐑀'),
+ (0x10419, 'M', '𐑁'),
+ (0x1041A, 'M', '𐑂'),
+ (0x1041B, 'M', '𐑃'),
+ (0x1041C, 'M', '𐑄'),
+ (0x1041D, 'M', '𐑅'),
+ (0x1041E, 'M', '𐑆'),
+ (0x1041F, 'M', '𐑇'),
+ (0x10420, 'M', '𐑈'),
+ (0x10421, 'M', '𐑉'),
+ (0x10422, 'M', '𐑊'),
+ (0x10423, 'M', '𐑋'),
+ (0x10424, 'M', '𐑌'),
+ (0x10425, 'M', '𐑍'),
+ (0x10426, 'M', '𐑎'),
+ (0x10427, 'M', '𐑏'),
+ (0x10428, 'V'),
+ (0x1049E, 'X'),
+ (0x104A0, 'V'),
+ (0x104AA, 'X'),
+ (0x104B0, 'M', '𐓘'),
+ (0x104B1, 'M', '𐓙'),
+ (0x104B2, 'M', '𐓚'),
+ (0x104B3, 'M', '𐓛'),
+ (0x104B4, 'M', '𐓜'),
+ (0x104B5, 'M', '𐓝'),
+ (0x104B6, 'M', '𐓞'),
+ (0x104B7, 'M', '𐓟'),
+ (0x104B8, 'M', '𐓠'),
+ (0x104B9, 'M', '𐓡'),
+ ]
+
+def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x104BA, 'M', '𐓢'),
+ (0x104BB, 'M', '𐓣'),
+ (0x104BC, 'M', '𐓤'),
+ (0x104BD, 'M', '𐓥'),
+ (0x104BE, 'M', '𐓦'),
+ (0x104BF, 'M', '𐓧'),
+ (0x104C0, 'M', '𐓨'),
+ (0x104C1, 'M', '𐓩'),
+ (0x104C2, 'M', '𐓪'),
+ (0x104C3, 'M', '𐓫'),
+ (0x104C4, 'M', '𐓬'),
+ (0x104C5, 'M', '𐓭'),
+ (0x104C6, 'M', '𐓮'),
+ (0x104C7, 'M', '𐓯'),
+ (0x104C8, 'M', '𐓰'),
+ (0x104C9, 'M', '𐓱'),
+ (0x104CA, 'M', '𐓲'),
+ (0x104CB, 'M', '𐓳'),
+ (0x104CC, 'M', '𐓴'),
+ (0x104CD, 'M', '𐓵'),
+ (0x104CE, 'M', '𐓶'),
+ (0x104CF, 'M', '𐓷'),
+ (0x104D0, 'M', '𐓸'),
+ (0x104D1, 'M', '𐓹'),
+ (0x104D2, 'M', '𐓺'),
+ (0x104D3, 'M', '𐓻'),
+ (0x104D4, 'X'),
+ (0x104D8, 'V'),
+ (0x104FC, 'X'),
+ (0x10500, 'V'),
+ (0x10528, 'X'),
+ (0x10530, 'V'),
+ (0x10564, 'X'),
+ (0x1056F, 'V'),
+ (0x10570, 'M', '𐖗'),
+ (0x10571, 'M', '𐖘'),
+ (0x10572, 'M', '𐖙'),
+ (0x10573, 'M', '𐖚'),
+ (0x10574, 'M', '𐖛'),
+ (0x10575, 'M', '𐖜'),
+ (0x10576, 'M', '𐖝'),
+ (0x10577, 'M', '𐖞'),
+ (0x10578, 'M', '𐖟'),
+ (0x10579, 'M', '𐖠'),
+ (0x1057A, 'M', '𐖡'),
+ (0x1057B, 'X'),
+ (0x1057C, 'M', '𐖣'),
+ (0x1057D, 'M', '𐖤'),
+ (0x1057E, 'M', '𐖥'),
+ (0x1057F, 'M', '𐖦'),
+ (0x10580, 'M', '𐖧'),
+ (0x10581, 'M', '𐖨'),
+ (0x10582, 'M', '𐖩'),
+ (0x10583, 'M', '𐖪'),
+ (0x10584, 'M', '𐖫'),
+ (0x10585, 'M', '𐖬'),
+ (0x10586, 'M', '𐖭'),
+ (0x10587, 'M', '𐖮'),
+ (0x10588, 'M', '𐖯'),
+ (0x10589, 'M', '𐖰'),
+ (0x1058A, 'M', '𐖱'),
+ (0x1058B, 'X'),
+ (0x1058C, 'M', '𐖳'),
+ (0x1058D, 'M', '𐖴'),
+ (0x1058E, 'M', '𐖵'),
+ (0x1058F, 'M', '𐖶'),
+ (0x10590, 'M', '𐖷'),
+ (0x10591, 'M', '𐖸'),
+ (0x10592, 'M', '𐖹'),
+ (0x10593, 'X'),
+ (0x10594, 'M', '𐖻'),
+ (0x10595, 'M', '𐖼'),
+ (0x10596, 'X'),
+ (0x10597, 'V'),
+ (0x105A2, 'X'),
+ (0x105A3, 'V'),
+ (0x105B2, 'X'),
+ (0x105B3, 'V'),
+ (0x105BA, 'X'),
+ (0x105BB, 'V'),
+ (0x105BD, 'X'),
+ (0x10600, 'V'),
+ (0x10737, 'X'),
+ (0x10740, 'V'),
+ (0x10756, 'X'),
+ (0x10760, 'V'),
+ (0x10768, 'X'),
+ (0x10780, 'V'),
+ (0x10781, 'M', 'ː'),
+ (0x10782, 'M', 'ˑ'),
+ (0x10783, 'M', 'æ'),
+ (0x10784, 'M', 'ʙ'),
+ (0x10785, 'M', 'ɓ'),
+ (0x10786, 'X'),
+ (0x10787, 'M', 'ʣ'),
+ (0x10788, 'M', 'ꭦ'),
+ (0x10789, 'M', 'ʥ'),
+ (0x1078A, 'M', 'ʤ'),
+ (0x1078B, 'M', 'ɖ'),
+ (0x1078C, 'M', 'ɗ'),
+ ]
+
+def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1078D, 'M', 'ᶑ'),
+ (0x1078E, 'M', 'ɘ'),
+ (0x1078F, 'M', 'ɞ'),
+ (0x10790, 'M', 'ʩ'),
+ (0x10791, 'M', 'ɤ'),
+ (0x10792, 'M', 'ɢ'),
+ (0x10793, 'M', 'ɠ'),
+ (0x10794, 'M', 'ʛ'),
+ (0x10795, 'M', 'ħ'),
+ (0x10796, 'M', 'ʜ'),
+ (0x10797, 'M', 'ɧ'),
+ (0x10798, 'M', 'ʄ'),
+ (0x10799, 'M', 'ʪ'),
+ (0x1079A, 'M', 'ʫ'),
+ (0x1079B, 'M', 'ɬ'),
+ (0x1079C, 'M', '𝼄'),
+ (0x1079D, 'M', 'ꞎ'),
+ (0x1079E, 'M', 'ɮ'),
+ (0x1079F, 'M', '𝼅'),
+ (0x107A0, 'M', 'ʎ'),
+ (0x107A1, 'M', '𝼆'),
+ (0x107A2, 'M', 'ø'),
+ (0x107A3, 'M', 'ɶ'),
+ (0x107A4, 'M', 'ɷ'),
+ (0x107A5, 'M', 'q'),
+ (0x107A6, 'M', 'ɺ'),
+ (0x107A7, 'M', '𝼈'),
+ (0x107A8, 'M', 'ɽ'),
+ (0x107A9, 'M', 'ɾ'),
+ (0x107AA, 'M', 'ʀ'),
+ (0x107AB, 'M', 'ʨ'),
+ (0x107AC, 'M', 'ʦ'),
+ (0x107AD, 'M', 'ꭧ'),
+ (0x107AE, 'M', 'ʧ'),
+ (0x107AF, 'M', 'ʈ'),
+ (0x107B0, 'M', 'ⱱ'),
+ (0x107B1, 'X'),
+ (0x107B2, 'M', 'ʏ'),
+ (0x107B3, 'M', 'ʡ'),
+ (0x107B4, 'M', 'ʢ'),
+ (0x107B5, 'M', 'ʘ'),
+ (0x107B6, 'M', 'ǀ'),
+ (0x107B7, 'M', 'ǁ'),
+ (0x107B8, 'M', 'ǂ'),
+ (0x107B9, 'M', '𝼊'),
+ (0x107BA, 'M', '𝼞'),
+ (0x107BB, 'X'),
+ (0x10800, 'V'),
+ (0x10806, 'X'),
+ (0x10808, 'V'),
+ (0x10809, 'X'),
+ (0x1080A, 'V'),
+ (0x10836, 'X'),
+ (0x10837, 'V'),
+ (0x10839, 'X'),
+ (0x1083C, 'V'),
+ (0x1083D, 'X'),
+ (0x1083F, 'V'),
+ (0x10856, 'X'),
+ (0x10857, 'V'),
+ (0x1089F, 'X'),
+ (0x108A7, 'V'),
+ (0x108B0, 'X'),
+ (0x108E0, 'V'),
+ (0x108F3, 'X'),
+ (0x108F4, 'V'),
+ (0x108F6, 'X'),
+ (0x108FB, 'V'),
+ (0x1091C, 'X'),
+ (0x1091F, 'V'),
+ (0x1093A, 'X'),
+ (0x1093F, 'V'),
+ (0x10940, 'X'),
+ (0x10980, 'V'),
+ (0x109B8, 'X'),
+ (0x109BC, 'V'),
+ (0x109D0, 'X'),
+ (0x109D2, 'V'),
+ (0x10A04, 'X'),
+ (0x10A05, 'V'),
+ (0x10A07, 'X'),
+ (0x10A0C, 'V'),
+ (0x10A14, 'X'),
+ (0x10A15, 'V'),
+ (0x10A18, 'X'),
+ (0x10A19, 'V'),
+ (0x10A36, 'X'),
+ (0x10A38, 'V'),
+ (0x10A3B, 'X'),
+ (0x10A3F, 'V'),
+ (0x10A49, 'X'),
+ (0x10A50, 'V'),
+ (0x10A59, 'X'),
+ (0x10A60, 'V'),
+ (0x10AA0, 'X'),
+ (0x10AC0, 'V'),
+ (0x10AE7, 'X'),
+ (0x10AEB, 'V'),
+ (0x10AF7, 'X'),
+ (0x10B00, 'V'),
+ ]
+
+def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x10B36, 'X'),
+ (0x10B39, 'V'),
+ (0x10B56, 'X'),
+ (0x10B58, 'V'),
+ (0x10B73, 'X'),
+ (0x10B78, 'V'),
+ (0x10B92, 'X'),
+ (0x10B99, 'V'),
+ (0x10B9D, 'X'),
+ (0x10BA9, 'V'),
+ (0x10BB0, 'X'),
+ (0x10C00, 'V'),
+ (0x10C49, 'X'),
+ (0x10C80, 'M', '𐳀'),
+ (0x10C81, 'M', '𐳁'),
+ (0x10C82, 'M', '𐳂'),
+ (0x10C83, 'M', '𐳃'),
+ (0x10C84, 'M', '𐳄'),
+ (0x10C85, 'M', '𐳅'),
+ (0x10C86, 'M', '𐳆'),
+ (0x10C87, 'M', '𐳇'),
+ (0x10C88, 'M', '𐳈'),
+ (0x10C89, 'M', '𐳉'),
+ (0x10C8A, 'M', '𐳊'),
+ (0x10C8B, 'M', '𐳋'),
+ (0x10C8C, 'M', '𐳌'),
+ (0x10C8D, 'M', '𐳍'),
+ (0x10C8E, 'M', '𐳎'),
+ (0x10C8F, 'M', '𐳏'),
+ (0x10C90, 'M', '𐳐'),
+ (0x10C91, 'M', '𐳑'),
+ (0x10C92, 'M', '𐳒'),
+ (0x10C93, 'M', '𐳓'),
+ (0x10C94, 'M', '𐳔'),
+ (0x10C95, 'M', '𐳕'),
+ (0x10C96, 'M', '𐳖'),
+ (0x10C97, 'M', '𐳗'),
+ (0x10C98, 'M', '𐳘'),
+ (0x10C99, 'M', '𐳙'),
+ (0x10C9A, 'M', '𐳚'),
+ (0x10C9B, 'M', '𐳛'),
+ (0x10C9C, 'M', '𐳜'),
+ (0x10C9D, 'M', '𐳝'),
+ (0x10C9E, 'M', '𐳞'),
+ (0x10C9F, 'M', '𐳟'),
+ (0x10CA0, 'M', '𐳠'),
+ (0x10CA1, 'M', '𐳡'),
+ (0x10CA2, 'M', '𐳢'),
+ (0x10CA3, 'M', '𐳣'),
+ (0x10CA4, 'M', '𐳤'),
+ (0x10CA5, 'M', '𐳥'),
+ (0x10CA6, 'M', '𐳦'),
+ (0x10CA7, 'M', '𐳧'),
+ (0x10CA8, 'M', '𐳨'),
+ (0x10CA9, 'M', '𐳩'),
+ (0x10CAA, 'M', '𐳪'),
+ (0x10CAB, 'M', '𐳫'),
+ (0x10CAC, 'M', '𐳬'),
+ (0x10CAD, 'M', '𐳭'),
+ (0x10CAE, 'M', '𐳮'),
+ (0x10CAF, 'M', '𐳯'),
+ (0x10CB0, 'M', '𐳰'),
+ (0x10CB1, 'M', '𐳱'),
+ (0x10CB2, 'M', '𐳲'),
+ (0x10CB3, 'X'),
+ (0x10CC0, 'V'),
+ (0x10CF3, 'X'),
+ (0x10CFA, 'V'),
+ (0x10D28, 'X'),
+ (0x10D30, 'V'),
+ (0x10D3A, 'X'),
+ (0x10E60, 'V'),
+ (0x10E7F, 'X'),
+ (0x10E80, 'V'),
+ (0x10EAA, 'X'),
+ (0x10EAB, 'V'),
+ (0x10EAE, 'X'),
+ (0x10EB0, 'V'),
+ (0x10EB2, 'X'),
+ (0x10EFD, 'V'),
+ (0x10F28, 'X'),
+ (0x10F30, 'V'),
+ (0x10F5A, 'X'),
+ (0x10F70, 'V'),
+ (0x10F8A, 'X'),
+ (0x10FB0, 'V'),
+ (0x10FCC, 'X'),
+ (0x10FE0, 'V'),
+ (0x10FF7, 'X'),
+ (0x11000, 'V'),
+ (0x1104E, 'X'),
+ (0x11052, 'V'),
+ (0x11076, 'X'),
+ (0x1107F, 'V'),
+ (0x110BD, 'X'),
+ (0x110BE, 'V'),
+ (0x110C3, 'X'),
+ (0x110D0, 'V'),
+ (0x110E9, 'X'),
+ (0x110F0, 'V'),
+ ]
+
+def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x110FA, 'X'),
+ (0x11100, 'V'),
+ (0x11135, 'X'),
+ (0x11136, 'V'),
+ (0x11148, 'X'),
+ (0x11150, 'V'),
+ (0x11177, 'X'),
+ (0x11180, 'V'),
+ (0x111E0, 'X'),
+ (0x111E1, 'V'),
+ (0x111F5, 'X'),
+ (0x11200, 'V'),
+ (0x11212, 'X'),
+ (0x11213, 'V'),
+ (0x11242, 'X'),
+ (0x11280, 'V'),
+ (0x11287, 'X'),
+ (0x11288, 'V'),
+ (0x11289, 'X'),
+ (0x1128A, 'V'),
+ (0x1128E, 'X'),
+ (0x1128F, 'V'),
+ (0x1129E, 'X'),
+ (0x1129F, 'V'),
+ (0x112AA, 'X'),
+ (0x112B0, 'V'),
+ (0x112EB, 'X'),
+ (0x112F0, 'V'),
+ (0x112FA, 'X'),
+ (0x11300, 'V'),
+ (0x11304, 'X'),
+ (0x11305, 'V'),
+ (0x1130D, 'X'),
+ (0x1130F, 'V'),
+ (0x11311, 'X'),
+ (0x11313, 'V'),
+ (0x11329, 'X'),
+ (0x1132A, 'V'),
+ (0x11331, 'X'),
+ (0x11332, 'V'),
+ (0x11334, 'X'),
+ (0x11335, 'V'),
+ (0x1133A, 'X'),
+ (0x1133B, 'V'),
+ (0x11345, 'X'),
+ (0x11347, 'V'),
+ (0x11349, 'X'),
+ (0x1134B, 'V'),
+ (0x1134E, 'X'),
+ (0x11350, 'V'),
+ (0x11351, 'X'),
+ (0x11357, 'V'),
+ (0x11358, 'X'),
+ (0x1135D, 'V'),
+ (0x11364, 'X'),
+ (0x11366, 'V'),
+ (0x1136D, 'X'),
+ (0x11370, 'V'),
+ (0x11375, 'X'),
+ (0x11400, 'V'),
+ (0x1145C, 'X'),
+ (0x1145D, 'V'),
+ (0x11462, 'X'),
+ (0x11480, 'V'),
+ (0x114C8, 'X'),
+ (0x114D0, 'V'),
+ (0x114DA, 'X'),
+ (0x11580, 'V'),
+ (0x115B6, 'X'),
+ (0x115B8, 'V'),
+ (0x115DE, 'X'),
+ (0x11600, 'V'),
+ (0x11645, 'X'),
+ (0x11650, 'V'),
+ (0x1165A, 'X'),
+ (0x11660, 'V'),
+ (0x1166D, 'X'),
+ (0x11680, 'V'),
+ (0x116BA, 'X'),
+ (0x116C0, 'V'),
+ (0x116CA, 'X'),
+ (0x11700, 'V'),
+ (0x1171B, 'X'),
+ (0x1171D, 'V'),
+ (0x1172C, 'X'),
+ (0x11730, 'V'),
+ (0x11747, 'X'),
+ (0x11800, 'V'),
+ (0x1183C, 'X'),
+ (0x118A0, 'M', '𑣀'),
+ (0x118A1, 'M', '𑣁'),
+ (0x118A2, 'M', '𑣂'),
+ (0x118A3, 'M', '𑣃'),
+ (0x118A4, 'M', '𑣄'),
+ (0x118A5, 'M', '𑣅'),
+ (0x118A6, 'M', '𑣆'),
+ (0x118A7, 'M', '𑣇'),
+ (0x118A8, 'M', '𑣈'),
+ (0x118A9, 'M', '𑣉'),
+ (0x118AA, 'M', '𑣊'),
+ ]
+
+def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x118AB, 'M', '𑣋'),
+ (0x118AC, 'M', '𑣌'),
+ (0x118AD, 'M', '𑣍'),
+ (0x118AE, 'M', '𑣎'),
+ (0x118AF, 'M', '𑣏'),
+ (0x118B0, 'M', '𑣐'),
+ (0x118B1, 'M', '𑣑'),
+ (0x118B2, 'M', '𑣒'),
+ (0x118B3, 'M', '𑣓'),
+ (0x118B4, 'M', '𑣔'),
+ (0x118B5, 'M', '𑣕'),
+ (0x118B6, 'M', '𑣖'),
+ (0x118B7, 'M', '𑣗'),
+ (0x118B8, 'M', '𑣘'),
+ (0x118B9, 'M', '𑣙'),
+ (0x118BA, 'M', '𑣚'),
+ (0x118BB, 'M', '𑣛'),
+ (0x118BC, 'M', '𑣜'),
+ (0x118BD, 'M', '𑣝'),
+ (0x118BE, 'M', '𑣞'),
+ (0x118BF, 'M', '𑣟'),
+ (0x118C0, 'V'),
+ (0x118F3, 'X'),
+ (0x118FF, 'V'),
+ (0x11907, 'X'),
+ (0x11909, 'V'),
+ (0x1190A, 'X'),
+ (0x1190C, 'V'),
+ (0x11914, 'X'),
+ (0x11915, 'V'),
+ (0x11917, 'X'),
+ (0x11918, 'V'),
+ (0x11936, 'X'),
+ (0x11937, 'V'),
+ (0x11939, 'X'),
+ (0x1193B, 'V'),
+ (0x11947, 'X'),
+ (0x11950, 'V'),
+ (0x1195A, 'X'),
+ (0x119A0, 'V'),
+ (0x119A8, 'X'),
+ (0x119AA, 'V'),
+ (0x119D8, 'X'),
+ (0x119DA, 'V'),
+ (0x119E5, 'X'),
+ (0x11A00, 'V'),
+ (0x11A48, 'X'),
+ (0x11A50, 'V'),
+ (0x11AA3, 'X'),
+ (0x11AB0, 'V'),
+ (0x11AF9, 'X'),
+ (0x11B00, 'V'),
+ (0x11B0A, 'X'),
+ (0x11C00, 'V'),
+ (0x11C09, 'X'),
+ (0x11C0A, 'V'),
+ (0x11C37, 'X'),
+ (0x11C38, 'V'),
+ (0x11C46, 'X'),
+ (0x11C50, 'V'),
+ (0x11C6D, 'X'),
+ (0x11C70, 'V'),
+ (0x11C90, 'X'),
+ (0x11C92, 'V'),
+ (0x11CA8, 'X'),
+ (0x11CA9, 'V'),
+ (0x11CB7, 'X'),
+ (0x11D00, 'V'),
+ (0x11D07, 'X'),
+ (0x11D08, 'V'),
+ (0x11D0A, 'X'),
+ (0x11D0B, 'V'),
+ (0x11D37, 'X'),
+ (0x11D3A, 'V'),
+ (0x11D3B, 'X'),
+ (0x11D3C, 'V'),
+ (0x11D3E, 'X'),
+ (0x11D3F, 'V'),
+ (0x11D48, 'X'),
+ (0x11D50, 'V'),
+ (0x11D5A, 'X'),
+ (0x11D60, 'V'),
+ (0x11D66, 'X'),
+ (0x11D67, 'V'),
+ (0x11D69, 'X'),
+ (0x11D6A, 'V'),
+ (0x11D8F, 'X'),
+ (0x11D90, 'V'),
+ (0x11D92, 'X'),
+ (0x11D93, 'V'),
+ (0x11D99, 'X'),
+ (0x11DA0, 'V'),
+ (0x11DAA, 'X'),
+ (0x11EE0, 'V'),
+ (0x11EF9, 'X'),
+ (0x11F00, 'V'),
+ (0x11F11, 'X'),
+ (0x11F12, 'V'),
+ (0x11F3B, 'X'),
+ (0x11F3E, 'V'),
+ ]
+
+def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x11F5A, 'X'),
+ (0x11FB0, 'V'),
+ (0x11FB1, 'X'),
+ (0x11FC0, 'V'),
+ (0x11FF2, 'X'),
+ (0x11FFF, 'V'),
+ (0x1239A, 'X'),
+ (0x12400, 'V'),
+ (0x1246F, 'X'),
+ (0x12470, 'V'),
+ (0x12475, 'X'),
+ (0x12480, 'V'),
+ (0x12544, 'X'),
+ (0x12F90, 'V'),
+ (0x12FF3, 'X'),
+ (0x13000, 'V'),
+ (0x13430, 'X'),
+ (0x13440, 'V'),
+ (0x13456, 'X'),
+ (0x14400, 'V'),
+ (0x14647, 'X'),
+ (0x16800, 'V'),
+ (0x16A39, 'X'),
+ (0x16A40, 'V'),
+ (0x16A5F, 'X'),
+ (0x16A60, 'V'),
+ (0x16A6A, 'X'),
+ (0x16A6E, 'V'),
+ (0x16ABF, 'X'),
+ (0x16AC0, 'V'),
+ (0x16ACA, 'X'),
+ (0x16AD0, 'V'),
+ (0x16AEE, 'X'),
+ (0x16AF0, 'V'),
+ (0x16AF6, 'X'),
+ (0x16B00, 'V'),
+ (0x16B46, 'X'),
+ (0x16B50, 'V'),
+ (0x16B5A, 'X'),
+ (0x16B5B, 'V'),
+ (0x16B62, 'X'),
+ (0x16B63, 'V'),
+ (0x16B78, 'X'),
+ (0x16B7D, 'V'),
+ (0x16B90, 'X'),
+ (0x16E40, 'M', '𖹠'),
+ (0x16E41, 'M', '𖹡'),
+ (0x16E42, 'M', '𖹢'),
+ (0x16E43, 'M', '𖹣'),
+ (0x16E44, 'M', '𖹤'),
+ (0x16E45, 'M', '𖹥'),
+ (0x16E46, 'M', '𖹦'),
+ (0x16E47, 'M', '𖹧'),
+ (0x16E48, 'M', '𖹨'),
+ (0x16E49, 'M', '𖹩'),
+ (0x16E4A, 'M', '𖹪'),
+ (0x16E4B, 'M', '𖹫'),
+ (0x16E4C, 'M', '𖹬'),
+ (0x16E4D, 'M', '𖹭'),
+ (0x16E4E, 'M', '𖹮'),
+ (0x16E4F, 'M', '𖹯'),
+ (0x16E50, 'M', '𖹰'),
+ (0x16E51, 'M', '𖹱'),
+ (0x16E52, 'M', '𖹲'),
+ (0x16E53, 'M', '𖹳'),
+ (0x16E54, 'M', '𖹴'),
+ (0x16E55, 'M', '𖹵'),
+ (0x16E56, 'M', '𖹶'),
+ (0x16E57, 'M', '𖹷'),
+ (0x16E58, 'M', '𖹸'),
+ (0x16E59, 'M', '𖹹'),
+ (0x16E5A, 'M', '𖹺'),
+ (0x16E5B, 'M', '𖹻'),
+ (0x16E5C, 'M', '𖹼'),
+ (0x16E5D, 'M', '𖹽'),
+ (0x16E5E, 'M', '𖹾'),
+ (0x16E5F, 'M', '𖹿'),
+ (0x16E60, 'V'),
+ (0x16E9B, 'X'),
+ (0x16F00, 'V'),
+ (0x16F4B, 'X'),
+ (0x16F4F, 'V'),
+ (0x16F88, 'X'),
+ (0x16F8F, 'V'),
+ (0x16FA0, 'X'),
+ (0x16FE0, 'V'),
+ (0x16FE5, 'X'),
+ (0x16FF0, 'V'),
+ (0x16FF2, 'X'),
+ (0x17000, 'V'),
+ (0x187F8, 'X'),
+ (0x18800, 'V'),
+ (0x18CD6, 'X'),
+ (0x18D00, 'V'),
+ (0x18D09, 'X'),
+ (0x1AFF0, 'V'),
+ (0x1AFF4, 'X'),
+ (0x1AFF5, 'V'),
+ (0x1AFFC, 'X'),
+ (0x1AFFD, 'V'),
+ ]
+
+def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1AFFF, 'X'),
+ (0x1B000, 'V'),
+ (0x1B123, 'X'),
+ (0x1B132, 'V'),
+ (0x1B133, 'X'),
+ (0x1B150, 'V'),
+ (0x1B153, 'X'),
+ (0x1B155, 'V'),
+ (0x1B156, 'X'),
+ (0x1B164, 'V'),
+ (0x1B168, 'X'),
+ (0x1B170, 'V'),
+ (0x1B2FC, 'X'),
+ (0x1BC00, 'V'),
+ (0x1BC6B, 'X'),
+ (0x1BC70, 'V'),
+ (0x1BC7D, 'X'),
+ (0x1BC80, 'V'),
+ (0x1BC89, 'X'),
+ (0x1BC90, 'V'),
+ (0x1BC9A, 'X'),
+ (0x1BC9C, 'V'),
+ (0x1BCA0, 'I'),
+ (0x1BCA4, 'X'),
+ (0x1CF00, 'V'),
+ (0x1CF2E, 'X'),
+ (0x1CF30, 'V'),
+ (0x1CF47, 'X'),
+ (0x1CF50, 'V'),
+ (0x1CFC4, 'X'),
+ (0x1D000, 'V'),
+ (0x1D0F6, 'X'),
+ (0x1D100, 'V'),
+ (0x1D127, 'X'),
+ (0x1D129, 'V'),
+ (0x1D15E, 'M', '𝅗𝅥'),
+ (0x1D15F, 'M', '𝅘𝅥'),
+ (0x1D160, 'M', '𝅘𝅥𝅮'),
+ (0x1D161, 'M', '𝅘𝅥𝅯'),
+ (0x1D162, 'M', '𝅘𝅥𝅰'),
+ (0x1D163, 'M', '𝅘𝅥𝅱'),
+ (0x1D164, 'M', '𝅘𝅥𝅲'),
+ (0x1D165, 'V'),
+ (0x1D173, 'X'),
+ (0x1D17B, 'V'),
+ (0x1D1BB, 'M', '𝆹𝅥'),
+ (0x1D1BC, 'M', '𝆺𝅥'),
+ (0x1D1BD, 'M', '𝆹𝅥𝅮'),
+ (0x1D1BE, 'M', '𝆺𝅥𝅮'),
+ (0x1D1BF, 'M', '𝆹𝅥𝅯'),
+ (0x1D1C0, 'M', '𝆺𝅥𝅯'),
+ (0x1D1C1, 'V'),
+ (0x1D1EB, 'X'),
+ (0x1D200, 'V'),
+ (0x1D246, 'X'),
+ (0x1D2C0, 'V'),
+ (0x1D2D4, 'X'),
+ (0x1D2E0, 'V'),
+ (0x1D2F4, 'X'),
+ (0x1D300, 'V'),
+ (0x1D357, 'X'),
+ (0x1D360, 'V'),
+ (0x1D379, 'X'),
+ (0x1D400, 'M', 'a'),
+ (0x1D401, 'M', 'b'),
+ (0x1D402, 'M', 'c'),
+ (0x1D403, 'M', 'd'),
+ (0x1D404, 'M', 'e'),
+ (0x1D405, 'M', 'f'),
+ (0x1D406, 'M', 'g'),
+ (0x1D407, 'M', 'h'),
+ (0x1D408, 'M', 'i'),
+ (0x1D409, 'M', 'j'),
+ (0x1D40A, 'M', 'k'),
+ (0x1D40B, 'M', 'l'),
+ (0x1D40C, 'M', 'm'),
+ (0x1D40D, 'M', 'n'),
+ (0x1D40E, 'M', 'o'),
+ (0x1D40F, 'M', 'p'),
+ (0x1D410, 'M', 'q'),
+ (0x1D411, 'M', 'r'),
+ (0x1D412, 'M', 's'),
+ (0x1D413, 'M', 't'),
+ (0x1D414, 'M', 'u'),
+ (0x1D415, 'M', 'v'),
+ (0x1D416, 'M', 'w'),
+ (0x1D417, 'M', 'x'),
+ (0x1D418, 'M', 'y'),
+ (0x1D419, 'M', 'z'),
+ (0x1D41A, 'M', 'a'),
+ (0x1D41B, 'M', 'b'),
+ (0x1D41C, 'M', 'c'),
+ (0x1D41D, 'M', 'd'),
+ (0x1D41E, 'M', 'e'),
+ (0x1D41F, 'M', 'f'),
+ (0x1D420, 'M', 'g'),
+ (0x1D421, 'M', 'h'),
+ (0x1D422, 'M', 'i'),
+ (0x1D423, 'M', 'j'),
+ (0x1D424, 'M', 'k'),
+ ]
+
+def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D425, 'M', 'l'),
+ (0x1D426, 'M', 'm'),
+ (0x1D427, 'M', 'n'),
+ (0x1D428, 'M', 'o'),
+ (0x1D429, 'M', 'p'),
+ (0x1D42A, 'M', 'q'),
+ (0x1D42B, 'M', 'r'),
+ (0x1D42C, 'M', 's'),
+ (0x1D42D, 'M', 't'),
+ (0x1D42E, 'M', 'u'),
+ (0x1D42F, 'M', 'v'),
+ (0x1D430, 'M', 'w'),
+ (0x1D431, 'M', 'x'),
+ (0x1D432, 'M', 'y'),
+ (0x1D433, 'M', 'z'),
+ (0x1D434, 'M', 'a'),
+ (0x1D435, 'M', 'b'),
+ (0x1D436, 'M', 'c'),
+ (0x1D437, 'M', 'd'),
+ (0x1D438, 'M', 'e'),
+ (0x1D439, 'M', 'f'),
+ (0x1D43A, 'M', 'g'),
+ (0x1D43B, 'M', 'h'),
+ (0x1D43C, 'M', 'i'),
+ (0x1D43D, 'M', 'j'),
+ (0x1D43E, 'M', 'k'),
+ (0x1D43F, 'M', 'l'),
+ (0x1D440, 'M', 'm'),
+ (0x1D441, 'M', 'n'),
+ (0x1D442, 'M', 'o'),
+ (0x1D443, 'M', 'p'),
+ (0x1D444, 'M', 'q'),
+ (0x1D445, 'M', 'r'),
+ (0x1D446, 'M', 's'),
+ (0x1D447, 'M', 't'),
+ (0x1D448, 'M', 'u'),
+ (0x1D449, 'M', 'v'),
+ (0x1D44A, 'M', 'w'),
+ (0x1D44B, 'M', 'x'),
+ (0x1D44C, 'M', 'y'),
+ (0x1D44D, 'M', 'z'),
+ (0x1D44E, 'M', 'a'),
+ (0x1D44F, 'M', 'b'),
+ (0x1D450, 'M', 'c'),
+ (0x1D451, 'M', 'd'),
+ (0x1D452, 'M', 'e'),
+ (0x1D453, 'M', 'f'),
+ (0x1D454, 'M', 'g'),
+ (0x1D455, 'X'),
+ (0x1D456, 'M', 'i'),
+ (0x1D457, 'M', 'j'),
+ (0x1D458, 'M', 'k'),
+ (0x1D459, 'M', 'l'),
+ (0x1D45A, 'M', 'm'),
+ (0x1D45B, 'M', 'n'),
+ (0x1D45C, 'M', 'o'),
+ (0x1D45D, 'M', 'p'),
+ (0x1D45E, 'M', 'q'),
+ (0x1D45F, 'M', 'r'),
+ (0x1D460, 'M', 's'),
+ (0x1D461, 'M', 't'),
+ (0x1D462, 'M', 'u'),
+ (0x1D463, 'M', 'v'),
+ (0x1D464, 'M', 'w'),
+ (0x1D465, 'M', 'x'),
+ (0x1D466, 'M', 'y'),
+ (0x1D467, 'M', 'z'),
+ (0x1D468, 'M', 'a'),
+ (0x1D469, 'M', 'b'),
+ (0x1D46A, 'M', 'c'),
+ (0x1D46B, 'M', 'd'),
+ (0x1D46C, 'M', 'e'),
+ (0x1D46D, 'M', 'f'),
+ (0x1D46E, 'M', 'g'),
+ (0x1D46F, 'M', 'h'),
+ (0x1D470, 'M', 'i'),
+ (0x1D471, 'M', 'j'),
+ (0x1D472, 'M', 'k'),
+ (0x1D473, 'M', 'l'),
+ (0x1D474, 'M', 'm'),
+ (0x1D475, 'M', 'n'),
+ (0x1D476, 'M', 'o'),
+ (0x1D477, 'M', 'p'),
+ (0x1D478, 'M', 'q'),
+ (0x1D479, 'M', 'r'),
+ (0x1D47A, 'M', 's'),
+ (0x1D47B, 'M', 't'),
+ (0x1D47C, 'M', 'u'),
+ (0x1D47D, 'M', 'v'),
+ (0x1D47E, 'M', 'w'),
+ (0x1D47F, 'M', 'x'),
+ (0x1D480, 'M', 'y'),
+ (0x1D481, 'M', 'z'),
+ (0x1D482, 'M', 'a'),
+ (0x1D483, 'M', 'b'),
+ (0x1D484, 'M', 'c'),
+ (0x1D485, 'M', 'd'),
+ (0x1D486, 'M', 'e'),
+ (0x1D487, 'M', 'f'),
+ (0x1D488, 'M', 'g'),
+ ]
+
+def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D489, 'M', 'h'),
+ (0x1D48A, 'M', 'i'),
+ (0x1D48B, 'M', 'j'),
+ (0x1D48C, 'M', 'k'),
+ (0x1D48D, 'M', 'l'),
+ (0x1D48E, 'M', 'm'),
+ (0x1D48F, 'M', 'n'),
+ (0x1D490, 'M', 'o'),
+ (0x1D491, 'M', 'p'),
+ (0x1D492, 'M', 'q'),
+ (0x1D493, 'M', 'r'),
+ (0x1D494, 'M', 's'),
+ (0x1D495, 'M', 't'),
+ (0x1D496, 'M', 'u'),
+ (0x1D497, 'M', 'v'),
+ (0x1D498, 'M', 'w'),
+ (0x1D499, 'M', 'x'),
+ (0x1D49A, 'M', 'y'),
+ (0x1D49B, 'M', 'z'),
+ (0x1D49C, 'M', 'a'),
+ (0x1D49D, 'X'),
+ (0x1D49E, 'M', 'c'),
+ (0x1D49F, 'M', 'd'),
+ (0x1D4A0, 'X'),
+ (0x1D4A2, 'M', 'g'),
+ (0x1D4A3, 'X'),
+ (0x1D4A5, 'M', 'j'),
+ (0x1D4A6, 'M', 'k'),
+ (0x1D4A7, 'X'),
+ (0x1D4A9, 'M', 'n'),
+ (0x1D4AA, 'M', 'o'),
+ (0x1D4AB, 'M', 'p'),
+ (0x1D4AC, 'M', 'q'),
+ (0x1D4AD, 'X'),
+ (0x1D4AE, 'M', 's'),
+ (0x1D4AF, 'M', 't'),
+ (0x1D4B0, 'M', 'u'),
+ (0x1D4B1, 'M', 'v'),
+ (0x1D4B2, 'M', 'w'),
+ (0x1D4B3, 'M', 'x'),
+ (0x1D4B4, 'M', 'y'),
+ (0x1D4B5, 'M', 'z'),
+ (0x1D4B6, 'M', 'a'),
+ (0x1D4B7, 'M', 'b'),
+ (0x1D4B8, 'M', 'c'),
+ (0x1D4B9, 'M', 'd'),
+ (0x1D4BA, 'X'),
+ (0x1D4BB, 'M', 'f'),
+ (0x1D4BC, 'X'),
+ (0x1D4BD, 'M', 'h'),
+ (0x1D4BE, 'M', 'i'),
+ (0x1D4BF, 'M', 'j'),
+ (0x1D4C0, 'M', 'k'),
+ (0x1D4C1, 'M', 'l'),
+ (0x1D4C2, 'M', 'm'),
+ (0x1D4C3, 'M', 'n'),
+ (0x1D4C4, 'X'),
+ (0x1D4C5, 'M', 'p'),
+ (0x1D4C6, 'M', 'q'),
+ (0x1D4C7, 'M', 'r'),
+ (0x1D4C8, 'M', 's'),
+ (0x1D4C9, 'M', 't'),
+ (0x1D4CA, 'M', 'u'),
+ (0x1D4CB, 'M', 'v'),
+ (0x1D4CC, 'M', 'w'),
+ (0x1D4CD, 'M', 'x'),
+ (0x1D4CE, 'M', 'y'),
+ (0x1D4CF, 'M', 'z'),
+ (0x1D4D0, 'M', 'a'),
+ (0x1D4D1, 'M', 'b'),
+ (0x1D4D2, 'M', 'c'),
+ (0x1D4D3, 'M', 'd'),
+ (0x1D4D4, 'M', 'e'),
+ (0x1D4D5, 'M', 'f'),
+ (0x1D4D6, 'M', 'g'),
+ (0x1D4D7, 'M', 'h'),
+ (0x1D4D8, 'M', 'i'),
+ (0x1D4D9, 'M', 'j'),
+ (0x1D4DA, 'M', 'k'),
+ (0x1D4DB, 'M', 'l'),
+ (0x1D4DC, 'M', 'm'),
+ (0x1D4DD, 'M', 'n'),
+ (0x1D4DE, 'M', 'o'),
+ (0x1D4DF, 'M', 'p'),
+ (0x1D4E0, 'M', 'q'),
+ (0x1D4E1, 'M', 'r'),
+ (0x1D4E2, 'M', 's'),
+ (0x1D4E3, 'M', 't'),
+ (0x1D4E4, 'M', 'u'),
+ (0x1D4E5, 'M', 'v'),
+ (0x1D4E6, 'M', 'w'),
+ (0x1D4E7, 'M', 'x'),
+ (0x1D4E8, 'M', 'y'),
+ (0x1D4E9, 'M', 'z'),
+ (0x1D4EA, 'M', 'a'),
+ (0x1D4EB, 'M', 'b'),
+ (0x1D4EC, 'M', 'c'),
+ (0x1D4ED, 'M', 'd'),
+ (0x1D4EE, 'M', 'e'),
+ (0x1D4EF, 'M', 'f'),
+ ]
+
+def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D4F0, 'M', 'g'),
+ (0x1D4F1, 'M', 'h'),
+ (0x1D4F2, 'M', 'i'),
+ (0x1D4F3, 'M', 'j'),
+ (0x1D4F4, 'M', 'k'),
+ (0x1D4F5, 'M', 'l'),
+ (0x1D4F6, 'M', 'm'),
+ (0x1D4F7, 'M', 'n'),
+ (0x1D4F8, 'M', 'o'),
+ (0x1D4F9, 'M', 'p'),
+ (0x1D4FA, 'M', 'q'),
+ (0x1D4FB, 'M', 'r'),
+ (0x1D4FC, 'M', 's'),
+ (0x1D4FD, 'M', 't'),
+ (0x1D4FE, 'M', 'u'),
+ (0x1D4FF, 'M', 'v'),
+ (0x1D500, 'M', 'w'),
+ (0x1D501, 'M', 'x'),
+ (0x1D502, 'M', 'y'),
+ (0x1D503, 'M', 'z'),
+ (0x1D504, 'M', 'a'),
+ (0x1D505, 'M', 'b'),
+ (0x1D506, 'X'),
+ (0x1D507, 'M', 'd'),
+ (0x1D508, 'M', 'e'),
+ (0x1D509, 'M', 'f'),
+ (0x1D50A, 'M', 'g'),
+ (0x1D50B, 'X'),
+ (0x1D50D, 'M', 'j'),
+ (0x1D50E, 'M', 'k'),
+ (0x1D50F, 'M', 'l'),
+ (0x1D510, 'M', 'm'),
+ (0x1D511, 'M', 'n'),
+ (0x1D512, 'M', 'o'),
+ (0x1D513, 'M', 'p'),
+ (0x1D514, 'M', 'q'),
+ (0x1D515, 'X'),
+ (0x1D516, 'M', 's'),
+ (0x1D517, 'M', 't'),
+ (0x1D518, 'M', 'u'),
+ (0x1D519, 'M', 'v'),
+ (0x1D51A, 'M', 'w'),
+ (0x1D51B, 'M', 'x'),
+ (0x1D51C, 'M', 'y'),
+ (0x1D51D, 'X'),
+ (0x1D51E, 'M', 'a'),
+ (0x1D51F, 'M', 'b'),
+ (0x1D520, 'M', 'c'),
+ (0x1D521, 'M', 'd'),
+ (0x1D522, 'M', 'e'),
+ (0x1D523, 'M', 'f'),
+ (0x1D524, 'M', 'g'),
+ (0x1D525, 'M', 'h'),
+ (0x1D526, 'M', 'i'),
+ (0x1D527, 'M', 'j'),
+ (0x1D528, 'M', 'k'),
+ (0x1D529, 'M', 'l'),
+ (0x1D52A, 'M', 'm'),
+ (0x1D52B, 'M', 'n'),
+ (0x1D52C, 'M', 'o'),
+ (0x1D52D, 'M', 'p'),
+ (0x1D52E, 'M', 'q'),
+ (0x1D52F, 'M', 'r'),
+ (0x1D530, 'M', 's'),
+ (0x1D531, 'M', 't'),
+ (0x1D532, 'M', 'u'),
+ (0x1D533, 'M', 'v'),
+ (0x1D534, 'M', 'w'),
+ (0x1D535, 'M', 'x'),
+ (0x1D536, 'M', 'y'),
+ (0x1D537, 'M', 'z'),
+ (0x1D538, 'M', 'a'),
+ (0x1D539, 'M', 'b'),
+ (0x1D53A, 'X'),
+ (0x1D53B, 'M', 'd'),
+ (0x1D53C, 'M', 'e'),
+ (0x1D53D, 'M', 'f'),
+ (0x1D53E, 'M', 'g'),
+ (0x1D53F, 'X'),
+ (0x1D540, 'M', 'i'),
+ (0x1D541, 'M', 'j'),
+ (0x1D542, 'M', 'k'),
+ (0x1D543, 'M', 'l'),
+ (0x1D544, 'M', 'm'),
+ (0x1D545, 'X'),
+ (0x1D546, 'M', 'o'),
+ (0x1D547, 'X'),
+ (0x1D54A, 'M', 's'),
+ (0x1D54B, 'M', 't'),
+ (0x1D54C, 'M', 'u'),
+ (0x1D54D, 'M', 'v'),
+ (0x1D54E, 'M', 'w'),
+ (0x1D54F, 'M', 'x'),
+ (0x1D550, 'M', 'y'),
+ (0x1D551, 'X'),
+ (0x1D552, 'M', 'a'),
+ (0x1D553, 'M', 'b'),
+ (0x1D554, 'M', 'c'),
+ (0x1D555, 'M', 'd'),
+ (0x1D556, 'M', 'e'),
+ ]
+
+def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D557, 'M', 'f'),
+ (0x1D558, 'M', 'g'),
+ (0x1D559, 'M', 'h'),
+ (0x1D55A, 'M', 'i'),
+ (0x1D55B, 'M', 'j'),
+ (0x1D55C, 'M', 'k'),
+ (0x1D55D, 'M', 'l'),
+ (0x1D55E, 'M', 'm'),
+ (0x1D55F, 'M', 'n'),
+ (0x1D560, 'M', 'o'),
+ (0x1D561, 'M', 'p'),
+ (0x1D562, 'M', 'q'),
+ (0x1D563, 'M', 'r'),
+ (0x1D564, 'M', 's'),
+ (0x1D565, 'M', 't'),
+ (0x1D566, 'M', 'u'),
+ (0x1D567, 'M', 'v'),
+ (0x1D568, 'M', 'w'),
+ (0x1D569, 'M', 'x'),
+ (0x1D56A, 'M', 'y'),
+ (0x1D56B, 'M', 'z'),
+ (0x1D56C, 'M', 'a'),
+ (0x1D56D, 'M', 'b'),
+ (0x1D56E, 'M', 'c'),
+ (0x1D56F, 'M', 'd'),
+ (0x1D570, 'M', 'e'),
+ (0x1D571, 'M', 'f'),
+ (0x1D572, 'M', 'g'),
+ (0x1D573, 'M', 'h'),
+ (0x1D574, 'M', 'i'),
+ (0x1D575, 'M', 'j'),
+ (0x1D576, 'M', 'k'),
+ (0x1D577, 'M', 'l'),
+ (0x1D578, 'M', 'm'),
+ (0x1D579, 'M', 'n'),
+ (0x1D57A, 'M', 'o'),
+ (0x1D57B, 'M', 'p'),
+ (0x1D57C, 'M', 'q'),
+ (0x1D57D, 'M', 'r'),
+ (0x1D57E, 'M', 's'),
+ (0x1D57F, 'M', 't'),
+ (0x1D580, 'M', 'u'),
+ (0x1D581, 'M', 'v'),
+ (0x1D582, 'M', 'w'),
+ (0x1D583, 'M', 'x'),
+ (0x1D584, 'M', 'y'),
+ (0x1D585, 'M', 'z'),
+ (0x1D586, 'M', 'a'),
+ (0x1D587, 'M', 'b'),
+ (0x1D588, 'M', 'c'),
+ (0x1D589, 'M', 'd'),
+ (0x1D58A, 'M', 'e'),
+ (0x1D58B, 'M', 'f'),
+ (0x1D58C, 'M', 'g'),
+ (0x1D58D, 'M', 'h'),
+ (0x1D58E, 'M', 'i'),
+ (0x1D58F, 'M', 'j'),
+ (0x1D590, 'M', 'k'),
+ (0x1D591, 'M', 'l'),
+ (0x1D592, 'M', 'm'),
+ (0x1D593, 'M', 'n'),
+ (0x1D594, 'M', 'o'),
+ (0x1D595, 'M', 'p'),
+ (0x1D596, 'M', 'q'),
+ (0x1D597, 'M', 'r'),
+ (0x1D598, 'M', 's'),
+ (0x1D599, 'M', 't'),
+ (0x1D59A, 'M', 'u'),
+ (0x1D59B, 'M', 'v'),
+ (0x1D59C, 'M', 'w'),
+ (0x1D59D, 'M', 'x'),
+ (0x1D59E, 'M', 'y'),
+ (0x1D59F, 'M', 'z'),
+ (0x1D5A0, 'M', 'a'),
+ (0x1D5A1, 'M', 'b'),
+ (0x1D5A2, 'M', 'c'),
+ (0x1D5A3, 'M', 'd'),
+ (0x1D5A4, 'M', 'e'),
+ (0x1D5A5, 'M', 'f'),
+ (0x1D5A6, 'M', 'g'),
+ (0x1D5A7, 'M', 'h'),
+ (0x1D5A8, 'M', 'i'),
+ (0x1D5A9, 'M', 'j'),
+ (0x1D5AA, 'M', 'k'),
+ (0x1D5AB, 'M', 'l'),
+ (0x1D5AC, 'M', 'm'),
+ (0x1D5AD, 'M', 'n'),
+ (0x1D5AE, 'M', 'o'),
+ (0x1D5AF, 'M', 'p'),
+ (0x1D5B0, 'M', 'q'),
+ (0x1D5B1, 'M', 'r'),
+ (0x1D5B2, 'M', 's'),
+ (0x1D5B3, 'M', 't'),
+ (0x1D5B4, 'M', 'u'),
+ (0x1D5B5, 'M', 'v'),
+ (0x1D5B6, 'M', 'w'),
+ (0x1D5B7, 'M', 'x'),
+ (0x1D5B8, 'M', 'y'),
+ (0x1D5B9, 'M', 'z'),
+ (0x1D5BA, 'M', 'a'),
+ ]
+
+def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D5BB, 'M', 'b'),
+ (0x1D5BC, 'M', 'c'),
+ (0x1D5BD, 'M', 'd'),
+ (0x1D5BE, 'M', 'e'),
+ (0x1D5BF, 'M', 'f'),
+ (0x1D5C0, 'M', 'g'),
+ (0x1D5C1, 'M', 'h'),
+ (0x1D5C2, 'M', 'i'),
+ (0x1D5C3, 'M', 'j'),
+ (0x1D5C4, 'M', 'k'),
+ (0x1D5C5, 'M', 'l'),
+ (0x1D5C6, 'M', 'm'),
+ (0x1D5C7, 'M', 'n'),
+ (0x1D5C8, 'M', 'o'),
+ (0x1D5C9, 'M', 'p'),
+ (0x1D5CA, 'M', 'q'),
+ (0x1D5CB, 'M', 'r'),
+ (0x1D5CC, 'M', 's'),
+ (0x1D5CD, 'M', 't'),
+ (0x1D5CE, 'M', 'u'),
+ (0x1D5CF, 'M', 'v'),
+ (0x1D5D0, 'M', 'w'),
+ (0x1D5D1, 'M', 'x'),
+ (0x1D5D2, 'M', 'y'),
+ (0x1D5D3, 'M', 'z'),
+ (0x1D5D4, 'M', 'a'),
+ (0x1D5D5, 'M', 'b'),
+ (0x1D5D6, 'M', 'c'),
+ (0x1D5D7, 'M', 'd'),
+ (0x1D5D8, 'M', 'e'),
+ (0x1D5D9, 'M', 'f'),
+ (0x1D5DA, 'M', 'g'),
+ (0x1D5DB, 'M', 'h'),
+ (0x1D5DC, 'M', 'i'),
+ (0x1D5DD, 'M', 'j'),
+ (0x1D5DE, 'M', 'k'),
+ (0x1D5DF, 'M', 'l'),
+ (0x1D5E0, 'M', 'm'),
+ (0x1D5E1, 'M', 'n'),
+ (0x1D5E2, 'M', 'o'),
+ (0x1D5E3, 'M', 'p'),
+ (0x1D5E4, 'M', 'q'),
+ (0x1D5E5, 'M', 'r'),
+ (0x1D5E6, 'M', 's'),
+ (0x1D5E7, 'M', 't'),
+ (0x1D5E8, 'M', 'u'),
+ (0x1D5E9, 'M', 'v'),
+ (0x1D5EA, 'M', 'w'),
+ (0x1D5EB, 'M', 'x'),
+ (0x1D5EC, 'M', 'y'),
+ (0x1D5ED, 'M', 'z'),
+ (0x1D5EE, 'M', 'a'),
+ (0x1D5EF, 'M', 'b'),
+ (0x1D5F0, 'M', 'c'),
+ (0x1D5F1, 'M', 'd'),
+ (0x1D5F2, 'M', 'e'),
+ (0x1D5F3, 'M', 'f'),
+ (0x1D5F4, 'M', 'g'),
+ (0x1D5F5, 'M', 'h'),
+ (0x1D5F6, 'M', 'i'),
+ (0x1D5F7, 'M', 'j'),
+ (0x1D5F8, 'M', 'k'),
+ (0x1D5F9, 'M', 'l'),
+ (0x1D5FA, 'M', 'm'),
+ (0x1D5FB, 'M', 'n'),
+ (0x1D5FC, 'M', 'o'),
+ (0x1D5FD, 'M', 'p'),
+ (0x1D5FE, 'M', 'q'),
+ (0x1D5FF, 'M', 'r'),
+ (0x1D600, 'M', 's'),
+ (0x1D601, 'M', 't'),
+ (0x1D602, 'M', 'u'),
+ (0x1D603, 'M', 'v'),
+ (0x1D604, 'M', 'w'),
+ (0x1D605, 'M', 'x'),
+ (0x1D606, 'M', 'y'),
+ (0x1D607, 'M', 'z'),
+ (0x1D608, 'M', 'a'),
+ (0x1D609, 'M', 'b'),
+ (0x1D60A, 'M', 'c'),
+ (0x1D60B, 'M', 'd'),
+ (0x1D60C, 'M', 'e'),
+ (0x1D60D, 'M', 'f'),
+ (0x1D60E, 'M', 'g'),
+ (0x1D60F, 'M', 'h'),
+ (0x1D610, 'M', 'i'),
+ (0x1D611, 'M', 'j'),
+ (0x1D612, 'M', 'k'),
+ (0x1D613, 'M', 'l'),
+ (0x1D614, 'M', 'm'),
+ (0x1D615, 'M', 'n'),
+ (0x1D616, 'M', 'o'),
+ (0x1D617, 'M', 'p'),
+ (0x1D618, 'M', 'q'),
+ (0x1D619, 'M', 'r'),
+ (0x1D61A, 'M', 's'),
+ (0x1D61B, 'M', 't'),
+ (0x1D61C, 'M', 'u'),
+ (0x1D61D, 'M', 'v'),
+ (0x1D61E, 'M', 'w'),
+ ]
+
+def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D61F, 'M', 'x'),
+ (0x1D620, 'M', 'y'),
+ (0x1D621, 'M', 'z'),
+ (0x1D622, 'M', 'a'),
+ (0x1D623, 'M', 'b'),
+ (0x1D624, 'M', 'c'),
+ (0x1D625, 'M', 'd'),
+ (0x1D626, 'M', 'e'),
+ (0x1D627, 'M', 'f'),
+ (0x1D628, 'M', 'g'),
+ (0x1D629, 'M', 'h'),
+ (0x1D62A, 'M', 'i'),
+ (0x1D62B, 'M', 'j'),
+ (0x1D62C, 'M', 'k'),
+ (0x1D62D, 'M', 'l'),
+ (0x1D62E, 'M', 'm'),
+ (0x1D62F, 'M', 'n'),
+ (0x1D630, 'M', 'o'),
+ (0x1D631, 'M', 'p'),
+ (0x1D632, 'M', 'q'),
+ (0x1D633, 'M', 'r'),
+ (0x1D634, 'M', 's'),
+ (0x1D635, 'M', 't'),
+ (0x1D636, 'M', 'u'),
+ (0x1D637, 'M', 'v'),
+ (0x1D638, 'M', 'w'),
+ (0x1D639, 'M', 'x'),
+ (0x1D63A, 'M', 'y'),
+ (0x1D63B, 'M', 'z'),
+ (0x1D63C, 'M', 'a'),
+ (0x1D63D, 'M', 'b'),
+ (0x1D63E, 'M', 'c'),
+ (0x1D63F, 'M', 'd'),
+ (0x1D640, 'M', 'e'),
+ (0x1D641, 'M', 'f'),
+ (0x1D642, 'M', 'g'),
+ (0x1D643, 'M', 'h'),
+ (0x1D644, 'M', 'i'),
+ (0x1D645, 'M', 'j'),
+ (0x1D646, 'M', 'k'),
+ (0x1D647, 'M', 'l'),
+ (0x1D648, 'M', 'm'),
+ (0x1D649, 'M', 'n'),
+ (0x1D64A, 'M', 'o'),
+ (0x1D64B, 'M', 'p'),
+ (0x1D64C, 'M', 'q'),
+ (0x1D64D, 'M', 'r'),
+ (0x1D64E, 'M', 's'),
+ (0x1D64F, 'M', 't'),
+ (0x1D650, 'M', 'u'),
+ (0x1D651, 'M', 'v'),
+ (0x1D652, 'M', 'w'),
+ (0x1D653, 'M', 'x'),
+ (0x1D654, 'M', 'y'),
+ (0x1D655, 'M', 'z'),
+ (0x1D656, 'M', 'a'),
+ (0x1D657, 'M', 'b'),
+ (0x1D658, 'M', 'c'),
+ (0x1D659, 'M', 'd'),
+ (0x1D65A, 'M', 'e'),
+ (0x1D65B, 'M', 'f'),
+ (0x1D65C, 'M', 'g'),
+ (0x1D65D, 'M', 'h'),
+ (0x1D65E, 'M', 'i'),
+ (0x1D65F, 'M', 'j'),
+ (0x1D660, 'M', 'k'),
+ (0x1D661, 'M', 'l'),
+ (0x1D662, 'M', 'm'),
+ (0x1D663, 'M', 'n'),
+ (0x1D664, 'M', 'o'),
+ (0x1D665, 'M', 'p'),
+ (0x1D666, 'M', 'q'),
+ (0x1D667, 'M', 'r'),
+ (0x1D668, 'M', 's'),
+ (0x1D669, 'M', 't'),
+ (0x1D66A, 'M', 'u'),
+ (0x1D66B, 'M', 'v'),
+ (0x1D66C, 'M', 'w'),
+ (0x1D66D, 'M', 'x'),
+ (0x1D66E, 'M', 'y'),
+ (0x1D66F, 'M', 'z'),
+ (0x1D670, 'M', 'a'),
+ (0x1D671, 'M', 'b'),
+ (0x1D672, 'M', 'c'),
+ (0x1D673, 'M', 'd'),
+ (0x1D674, 'M', 'e'),
+ (0x1D675, 'M', 'f'),
+ (0x1D676, 'M', 'g'),
+ (0x1D677, 'M', 'h'),
+ (0x1D678, 'M', 'i'),
+ (0x1D679, 'M', 'j'),
+ (0x1D67A, 'M', 'k'),
+ (0x1D67B, 'M', 'l'),
+ (0x1D67C, 'M', 'm'),
+ (0x1D67D, 'M', 'n'),
+ (0x1D67E, 'M', 'o'),
+ (0x1D67F, 'M', 'p'),
+ (0x1D680, 'M', 'q'),
+ (0x1D681, 'M', 'r'),
+ (0x1D682, 'M', 's'),
+ ]
+
+def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D683, 'M', 't'),
+ (0x1D684, 'M', 'u'),
+ (0x1D685, 'M', 'v'),
+ (0x1D686, 'M', 'w'),
+ (0x1D687, 'M', 'x'),
+ (0x1D688, 'M', 'y'),
+ (0x1D689, 'M', 'z'),
+ (0x1D68A, 'M', 'a'),
+ (0x1D68B, 'M', 'b'),
+ (0x1D68C, 'M', 'c'),
+ (0x1D68D, 'M', 'd'),
+ (0x1D68E, 'M', 'e'),
+ (0x1D68F, 'M', 'f'),
+ (0x1D690, 'M', 'g'),
+ (0x1D691, 'M', 'h'),
+ (0x1D692, 'M', 'i'),
+ (0x1D693, 'M', 'j'),
+ (0x1D694, 'M', 'k'),
+ (0x1D695, 'M', 'l'),
+ (0x1D696, 'M', 'm'),
+ (0x1D697, 'M', 'n'),
+ (0x1D698, 'M', 'o'),
+ (0x1D699, 'M', 'p'),
+ (0x1D69A, 'M', 'q'),
+ (0x1D69B, 'M', 'r'),
+ (0x1D69C, 'M', 's'),
+ (0x1D69D, 'M', 't'),
+ (0x1D69E, 'M', 'u'),
+ (0x1D69F, 'M', 'v'),
+ (0x1D6A0, 'M', 'w'),
+ (0x1D6A1, 'M', 'x'),
+ (0x1D6A2, 'M', 'y'),
+ (0x1D6A3, 'M', 'z'),
+ (0x1D6A4, 'M', 'ı'),
+ (0x1D6A5, 'M', 'ȷ'),
+ (0x1D6A6, 'X'),
+ (0x1D6A8, 'M', 'α'),
+ (0x1D6A9, 'M', 'β'),
+ (0x1D6AA, 'M', 'γ'),
+ (0x1D6AB, 'M', 'δ'),
+ (0x1D6AC, 'M', 'ε'),
+ (0x1D6AD, 'M', 'ζ'),
+ (0x1D6AE, 'M', 'η'),
+ (0x1D6AF, 'M', 'θ'),
+ (0x1D6B0, 'M', 'ι'),
+ (0x1D6B1, 'M', 'κ'),
+ (0x1D6B2, 'M', 'λ'),
+ (0x1D6B3, 'M', 'μ'),
+ (0x1D6B4, 'M', 'ν'),
+ (0x1D6B5, 'M', 'ξ'),
+ (0x1D6B6, 'M', 'ο'),
+ (0x1D6B7, 'M', 'π'),
+ (0x1D6B8, 'M', 'ρ'),
+ (0x1D6B9, 'M', 'θ'),
+ (0x1D6BA, 'M', 'σ'),
+ (0x1D6BB, 'M', 'τ'),
+ (0x1D6BC, 'M', 'υ'),
+ (0x1D6BD, 'M', 'φ'),
+ (0x1D6BE, 'M', 'χ'),
+ (0x1D6BF, 'M', 'ψ'),
+ (0x1D6C0, 'M', 'ω'),
+ (0x1D6C1, 'M', '∇'),
+ (0x1D6C2, 'M', 'α'),
+ (0x1D6C3, 'M', 'β'),
+ (0x1D6C4, 'M', 'γ'),
+ (0x1D6C5, 'M', 'δ'),
+ (0x1D6C6, 'M', 'ε'),
+ (0x1D6C7, 'M', 'ζ'),
+ (0x1D6C8, 'M', 'η'),
+ (0x1D6C9, 'M', 'θ'),
+ (0x1D6CA, 'M', 'ι'),
+ (0x1D6CB, 'M', 'κ'),
+ (0x1D6CC, 'M', 'λ'),
+ (0x1D6CD, 'M', 'μ'),
+ (0x1D6CE, 'M', 'ν'),
+ (0x1D6CF, 'M', 'ξ'),
+ (0x1D6D0, 'M', 'ο'),
+ (0x1D6D1, 'M', 'π'),
+ (0x1D6D2, 'M', 'ρ'),
+ (0x1D6D3, 'M', 'σ'),
+ (0x1D6D5, 'M', 'τ'),
+ (0x1D6D6, 'M', 'υ'),
+ (0x1D6D7, 'M', 'φ'),
+ (0x1D6D8, 'M', 'χ'),
+ (0x1D6D9, 'M', 'ψ'),
+ (0x1D6DA, 'M', 'ω'),
+ (0x1D6DB, 'M', '∂'),
+ (0x1D6DC, 'M', 'ε'),
+ (0x1D6DD, 'M', 'θ'),
+ (0x1D6DE, 'M', 'κ'),
+ (0x1D6DF, 'M', 'φ'),
+ (0x1D6E0, 'M', 'ρ'),
+ (0x1D6E1, 'M', 'π'),
+ (0x1D6E2, 'M', 'α'),
+ (0x1D6E3, 'M', 'β'),
+ (0x1D6E4, 'M', 'γ'),
+ (0x1D6E5, 'M', 'δ'),
+ (0x1D6E6, 'M', 'ε'),
+ (0x1D6E7, 'M', 'ζ'),
+ (0x1D6E8, 'M', 'η'),
+ ]
+
+def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D6E9, 'M', 'θ'),
+ (0x1D6EA, 'M', 'ι'),
+ (0x1D6EB, 'M', 'κ'),
+ (0x1D6EC, 'M', 'λ'),
+ (0x1D6ED, 'M', 'μ'),
+ (0x1D6EE, 'M', 'ν'),
+ (0x1D6EF, 'M', 'ξ'),
+ (0x1D6F0, 'M', 'ο'),
+ (0x1D6F1, 'M', 'π'),
+ (0x1D6F2, 'M', 'ρ'),
+ (0x1D6F3, 'M', 'θ'),
+ (0x1D6F4, 'M', 'σ'),
+ (0x1D6F5, 'M', 'τ'),
+ (0x1D6F6, 'M', 'υ'),
+ (0x1D6F7, 'M', 'φ'),
+ (0x1D6F8, 'M', 'χ'),
+ (0x1D6F9, 'M', 'ψ'),
+ (0x1D6FA, 'M', 'ω'),
+ (0x1D6FB, 'M', '∇'),
+ (0x1D6FC, 'M', 'α'),
+ (0x1D6FD, 'M', 'β'),
+ (0x1D6FE, 'M', 'γ'),
+ (0x1D6FF, 'M', 'δ'),
+ (0x1D700, 'M', 'ε'),
+ (0x1D701, 'M', 'ζ'),
+ (0x1D702, 'M', 'η'),
+ (0x1D703, 'M', 'θ'),
+ (0x1D704, 'M', 'ι'),
+ (0x1D705, 'M', 'κ'),
+ (0x1D706, 'M', 'λ'),
+ (0x1D707, 'M', 'μ'),
+ (0x1D708, 'M', 'ν'),
+ (0x1D709, 'M', 'ξ'),
+ (0x1D70A, 'M', 'ο'),
+ (0x1D70B, 'M', 'π'),
+ (0x1D70C, 'M', 'ρ'),
+ (0x1D70D, 'M', 'σ'),
+ (0x1D70F, 'M', 'τ'),
+ (0x1D710, 'M', 'υ'),
+ (0x1D711, 'M', 'φ'),
+ (0x1D712, 'M', 'χ'),
+ (0x1D713, 'M', 'ψ'),
+ (0x1D714, 'M', 'ω'),
+ (0x1D715, 'M', '∂'),
+ (0x1D716, 'M', 'ε'),
+ (0x1D717, 'M', 'θ'),
+ (0x1D718, 'M', 'κ'),
+ (0x1D719, 'M', 'φ'),
+ (0x1D71A, 'M', 'ρ'),
+ (0x1D71B, 'M', 'π'),
+ (0x1D71C, 'M', 'α'),
+ (0x1D71D, 'M', 'β'),
+ (0x1D71E, 'M', 'γ'),
+ (0x1D71F, 'M', 'δ'),
+ (0x1D720, 'M', 'ε'),
+ (0x1D721, 'M', 'ζ'),
+ (0x1D722, 'M', 'η'),
+ (0x1D723, 'M', 'θ'),
+ (0x1D724, 'M', 'ι'),
+ (0x1D725, 'M', 'κ'),
+ (0x1D726, 'M', 'λ'),
+ (0x1D727, 'M', 'μ'),
+ (0x1D728, 'M', 'ν'),
+ (0x1D729, 'M', 'ξ'),
+ (0x1D72A, 'M', 'ο'),
+ (0x1D72B, 'M', 'π'),
+ (0x1D72C, 'M', 'ρ'),
+ (0x1D72D, 'M', 'θ'),
+ (0x1D72E, 'M', 'σ'),
+ (0x1D72F, 'M', 'τ'),
+ (0x1D730, 'M', 'υ'),
+ (0x1D731, 'M', 'φ'),
+ (0x1D732, 'M', 'χ'),
+ (0x1D733, 'M', 'ψ'),
+ (0x1D734, 'M', 'ω'),
+ (0x1D735, 'M', '∇'),
+ (0x1D736, 'M', 'α'),
+ (0x1D737, 'M', 'β'),
+ (0x1D738, 'M', 'γ'),
+ (0x1D739, 'M', 'δ'),
+ (0x1D73A, 'M', 'ε'),
+ (0x1D73B, 'M', 'ζ'),
+ (0x1D73C, 'M', 'η'),
+ (0x1D73D, 'M', 'θ'),
+ (0x1D73E, 'M', 'ι'),
+ (0x1D73F, 'M', 'κ'),
+ (0x1D740, 'M', 'λ'),
+ (0x1D741, 'M', 'μ'),
+ (0x1D742, 'M', 'ν'),
+ (0x1D743, 'M', 'ξ'),
+ (0x1D744, 'M', 'ο'),
+ (0x1D745, 'M', 'π'),
+ (0x1D746, 'M', 'ρ'),
+ (0x1D747, 'M', 'σ'),
+ (0x1D749, 'M', 'τ'),
+ (0x1D74A, 'M', 'υ'),
+ (0x1D74B, 'M', 'φ'),
+ (0x1D74C, 'M', 'χ'),
+ (0x1D74D, 'M', 'ψ'),
+ (0x1D74E, 'M', 'ω'),
+ ]
+
+def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D74F, 'M', '∂'),
+ (0x1D750, 'M', 'ε'),
+ (0x1D751, 'M', 'θ'),
+ (0x1D752, 'M', 'κ'),
+ (0x1D753, 'M', 'φ'),
+ (0x1D754, 'M', 'ρ'),
+ (0x1D755, 'M', 'π'),
+ (0x1D756, 'M', 'α'),
+ (0x1D757, 'M', 'β'),
+ (0x1D758, 'M', 'γ'),
+ (0x1D759, 'M', 'δ'),
+ (0x1D75A, 'M', 'ε'),
+ (0x1D75B, 'M', 'ζ'),
+ (0x1D75C, 'M', 'η'),
+ (0x1D75D, 'M', 'θ'),
+ (0x1D75E, 'M', 'ι'),
+ (0x1D75F, 'M', 'κ'),
+ (0x1D760, 'M', 'λ'),
+ (0x1D761, 'M', 'μ'),
+ (0x1D762, 'M', 'ν'),
+ (0x1D763, 'M', 'ξ'),
+ (0x1D764, 'M', 'ο'),
+ (0x1D765, 'M', 'π'),
+ (0x1D766, 'M', 'ρ'),
+ (0x1D767, 'M', 'θ'),
+ (0x1D768, 'M', 'σ'),
+ (0x1D769, 'M', 'τ'),
+ (0x1D76A, 'M', 'υ'),
+ (0x1D76B, 'M', 'φ'),
+ (0x1D76C, 'M', 'χ'),
+ (0x1D76D, 'M', 'ψ'),
+ (0x1D76E, 'M', 'ω'),
+ (0x1D76F, 'M', '∇'),
+ (0x1D770, 'M', 'α'),
+ (0x1D771, 'M', 'β'),
+ (0x1D772, 'M', 'γ'),
+ (0x1D773, 'M', 'δ'),
+ (0x1D774, 'M', 'ε'),
+ (0x1D775, 'M', 'ζ'),
+ (0x1D776, 'M', 'η'),
+ (0x1D777, 'M', 'θ'),
+ (0x1D778, 'M', 'ι'),
+ (0x1D779, 'M', 'κ'),
+ (0x1D77A, 'M', 'λ'),
+ (0x1D77B, 'M', 'μ'),
+ (0x1D77C, 'M', 'ν'),
+ (0x1D77D, 'M', 'ξ'),
+ (0x1D77E, 'M', 'ο'),
+ (0x1D77F, 'M', 'π'),
+ (0x1D780, 'M', 'ρ'),
+ (0x1D781, 'M', 'σ'),
+ (0x1D783, 'M', 'τ'),
+ (0x1D784, 'M', 'υ'),
+ (0x1D785, 'M', 'φ'),
+ (0x1D786, 'M', 'χ'),
+ (0x1D787, 'M', 'ψ'),
+ (0x1D788, 'M', 'ω'),
+ (0x1D789, 'M', '∂'),
+ (0x1D78A, 'M', 'ε'),
+ (0x1D78B, 'M', 'θ'),
+ (0x1D78C, 'M', 'κ'),
+ (0x1D78D, 'M', 'φ'),
+ (0x1D78E, 'M', 'ρ'),
+ (0x1D78F, 'M', 'π'),
+ (0x1D790, 'M', 'α'),
+ (0x1D791, 'M', 'β'),
+ (0x1D792, 'M', 'γ'),
+ (0x1D793, 'M', 'δ'),
+ (0x1D794, 'M', 'ε'),
+ (0x1D795, 'M', 'ζ'),
+ (0x1D796, 'M', 'η'),
+ (0x1D797, 'M', 'θ'),
+ (0x1D798, 'M', 'ι'),
+ (0x1D799, 'M', 'κ'),
+ (0x1D79A, 'M', 'λ'),
+ (0x1D79B, 'M', 'μ'),
+ (0x1D79C, 'M', 'ν'),
+ (0x1D79D, 'M', 'ξ'),
+ (0x1D79E, 'M', 'ο'),
+ (0x1D79F, 'M', 'π'),
+ (0x1D7A0, 'M', 'ρ'),
+ (0x1D7A1, 'M', 'θ'),
+ (0x1D7A2, 'M', 'σ'),
+ (0x1D7A3, 'M', 'τ'),
+ (0x1D7A4, 'M', 'υ'),
+ (0x1D7A5, 'M', 'φ'),
+ (0x1D7A6, 'M', 'χ'),
+ (0x1D7A7, 'M', 'ψ'),
+ (0x1D7A8, 'M', 'ω'),
+ (0x1D7A9, 'M', '∇'),
+ (0x1D7AA, 'M', 'α'),
+ (0x1D7AB, 'M', 'β'),
+ (0x1D7AC, 'M', 'γ'),
+ (0x1D7AD, 'M', 'δ'),
+ (0x1D7AE, 'M', 'ε'),
+ (0x1D7AF, 'M', 'ζ'),
+ (0x1D7B0, 'M', 'η'),
+ (0x1D7B1, 'M', 'θ'),
+ (0x1D7B2, 'M', 'ι'),
+ (0x1D7B3, 'M', 'κ'),
+ ]
+
+def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D7B4, 'M', 'λ'),
+ (0x1D7B5, 'M', 'μ'),
+ (0x1D7B6, 'M', 'ν'),
+ (0x1D7B7, 'M', 'ξ'),
+ (0x1D7B8, 'M', 'ο'),
+ (0x1D7B9, 'M', 'π'),
+ (0x1D7BA, 'M', 'ρ'),
+ (0x1D7BB, 'M', 'σ'),
+ (0x1D7BD, 'M', 'τ'),
+ (0x1D7BE, 'M', 'υ'),
+ (0x1D7BF, 'M', 'φ'),
+ (0x1D7C0, 'M', 'χ'),
+ (0x1D7C1, 'M', 'ψ'),
+ (0x1D7C2, 'M', 'ω'),
+ (0x1D7C3, 'M', '∂'),
+ (0x1D7C4, 'M', 'ε'),
+ (0x1D7C5, 'M', 'θ'),
+ (0x1D7C6, 'M', 'κ'),
+ (0x1D7C7, 'M', 'φ'),
+ (0x1D7C8, 'M', 'ρ'),
+ (0x1D7C9, 'M', 'π'),
+ (0x1D7CA, 'M', 'ϝ'),
+ (0x1D7CC, 'X'),
+ (0x1D7CE, 'M', '0'),
+ (0x1D7CF, 'M', '1'),
+ (0x1D7D0, 'M', '2'),
+ (0x1D7D1, 'M', '3'),
+ (0x1D7D2, 'M', '4'),
+ (0x1D7D3, 'M', '5'),
+ (0x1D7D4, 'M', '6'),
+ (0x1D7D5, 'M', '7'),
+ (0x1D7D6, 'M', '8'),
+ (0x1D7D7, 'M', '9'),
+ (0x1D7D8, 'M', '0'),
+ (0x1D7D9, 'M', '1'),
+ (0x1D7DA, 'M', '2'),
+ (0x1D7DB, 'M', '3'),
+ (0x1D7DC, 'M', '4'),
+ (0x1D7DD, 'M', '5'),
+ (0x1D7DE, 'M', '6'),
+ (0x1D7DF, 'M', '7'),
+ (0x1D7E0, 'M', '8'),
+ (0x1D7E1, 'M', '9'),
+ (0x1D7E2, 'M', '0'),
+ (0x1D7E3, 'M', '1'),
+ (0x1D7E4, 'M', '2'),
+ (0x1D7E5, 'M', '3'),
+ (0x1D7E6, 'M', '4'),
+ (0x1D7E7, 'M', '5'),
+ (0x1D7E8, 'M', '6'),
+ (0x1D7E9, 'M', '7'),
+ (0x1D7EA, 'M', '8'),
+ (0x1D7EB, 'M', '9'),
+ (0x1D7EC, 'M', '0'),
+ (0x1D7ED, 'M', '1'),
+ (0x1D7EE, 'M', '2'),
+ (0x1D7EF, 'M', '3'),
+ (0x1D7F0, 'M', '4'),
+ (0x1D7F1, 'M', '5'),
+ (0x1D7F2, 'M', '6'),
+ (0x1D7F3, 'M', '7'),
+ (0x1D7F4, 'M', '8'),
+ (0x1D7F5, 'M', '9'),
+ (0x1D7F6, 'M', '0'),
+ (0x1D7F7, 'M', '1'),
+ (0x1D7F8, 'M', '2'),
+ (0x1D7F9, 'M', '3'),
+ (0x1D7FA, 'M', '4'),
+ (0x1D7FB, 'M', '5'),
+ (0x1D7FC, 'M', '6'),
+ (0x1D7FD, 'M', '7'),
+ (0x1D7FE, 'M', '8'),
+ (0x1D7FF, 'M', '9'),
+ (0x1D800, 'V'),
+ (0x1DA8C, 'X'),
+ (0x1DA9B, 'V'),
+ (0x1DAA0, 'X'),
+ (0x1DAA1, 'V'),
+ (0x1DAB0, 'X'),
+ (0x1DF00, 'V'),
+ (0x1DF1F, 'X'),
+ (0x1DF25, 'V'),
+ (0x1DF2B, 'X'),
+ (0x1E000, 'V'),
+ (0x1E007, 'X'),
+ (0x1E008, 'V'),
+ (0x1E019, 'X'),
+ (0x1E01B, 'V'),
+ (0x1E022, 'X'),
+ (0x1E023, 'V'),
+ (0x1E025, 'X'),
+ (0x1E026, 'V'),
+ (0x1E02B, 'X'),
+ (0x1E030, 'M', 'а'),
+ (0x1E031, 'M', 'б'),
+ (0x1E032, 'M', 'в'),
+ (0x1E033, 'M', 'г'),
+ (0x1E034, 'M', 'д'),
+ (0x1E035, 'M', 'е'),
+ (0x1E036, 'M', 'ж'),
+ ]
+
+def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1E037, 'M', 'з'),
+ (0x1E038, 'M', 'и'),
+ (0x1E039, 'M', 'к'),
+ (0x1E03A, 'M', 'л'),
+ (0x1E03B, 'M', 'м'),
+ (0x1E03C, 'M', 'о'),
+ (0x1E03D, 'M', 'п'),
+ (0x1E03E, 'M', 'р'),
+ (0x1E03F, 'M', 'с'),
+ (0x1E040, 'M', 'т'),
+ (0x1E041, 'M', 'у'),
+ (0x1E042, 'M', 'ф'),
+ (0x1E043, 'M', 'х'),
+ (0x1E044, 'M', 'ц'),
+ (0x1E045, 'M', 'ч'),
+ (0x1E046, 'M', 'ш'),
+ (0x1E047, 'M', 'ы'),
+ (0x1E048, 'M', 'э'),
+ (0x1E049, 'M', 'ю'),
+ (0x1E04A, 'M', 'ꚉ'),
+ (0x1E04B, 'M', 'ә'),
+ (0x1E04C, 'M', 'і'),
+ (0x1E04D, 'M', 'ј'),
+ (0x1E04E, 'M', 'ө'),
+ (0x1E04F, 'M', 'ү'),
+ (0x1E050, 'M', 'ӏ'),
+ (0x1E051, 'M', 'а'),
+ (0x1E052, 'M', 'б'),
+ (0x1E053, 'M', 'в'),
+ (0x1E054, 'M', 'г'),
+ (0x1E055, 'M', 'д'),
+ (0x1E056, 'M', 'е'),
+ (0x1E057, 'M', 'ж'),
+ (0x1E058, 'M', 'з'),
+ (0x1E059, 'M', 'и'),
+ (0x1E05A, 'M', 'к'),
+ (0x1E05B, 'M', 'л'),
+ (0x1E05C, 'M', 'о'),
+ (0x1E05D, 'M', 'п'),
+ (0x1E05E, 'M', 'с'),
+ (0x1E05F, 'M', 'у'),
+ (0x1E060, 'M', 'ф'),
+ (0x1E061, 'M', 'х'),
+ (0x1E062, 'M', 'ц'),
+ (0x1E063, 'M', 'ч'),
+ (0x1E064, 'M', 'ш'),
+ (0x1E065, 'M', 'ъ'),
+ (0x1E066, 'M', 'ы'),
+ (0x1E067, 'M', 'ґ'),
+ (0x1E068, 'M', 'і'),
+ (0x1E069, 'M', 'ѕ'),
+ (0x1E06A, 'M', 'џ'),
+ (0x1E06B, 'M', 'ҫ'),
+ (0x1E06C, 'M', 'ꙑ'),
+ (0x1E06D, 'M', 'ұ'),
+ (0x1E06E, 'X'),
+ (0x1E08F, 'V'),
+ (0x1E090, 'X'),
+ (0x1E100, 'V'),
+ (0x1E12D, 'X'),
+ (0x1E130, 'V'),
+ (0x1E13E, 'X'),
+ (0x1E140, 'V'),
+ (0x1E14A, 'X'),
+ (0x1E14E, 'V'),
+ (0x1E150, 'X'),
+ (0x1E290, 'V'),
+ (0x1E2AF, 'X'),
+ (0x1E2C0, 'V'),
+ (0x1E2FA, 'X'),
+ (0x1E2FF, 'V'),
+ (0x1E300, 'X'),
+ (0x1E4D0, 'V'),
+ (0x1E4FA, 'X'),
+ (0x1E7E0, 'V'),
+ (0x1E7E7, 'X'),
+ (0x1E7E8, 'V'),
+ (0x1E7EC, 'X'),
+ (0x1E7ED, 'V'),
+ (0x1E7EF, 'X'),
+ (0x1E7F0, 'V'),
+ (0x1E7FF, 'X'),
+ (0x1E800, 'V'),
+ (0x1E8C5, 'X'),
+ (0x1E8C7, 'V'),
+ (0x1E8D7, 'X'),
+ (0x1E900, 'M', '𞤢'),
+ (0x1E901, 'M', '𞤣'),
+ (0x1E902, 'M', '𞤤'),
+ (0x1E903, 'M', '𞤥'),
+ (0x1E904, 'M', '𞤦'),
+ (0x1E905, 'M', '𞤧'),
+ (0x1E906, 'M', '𞤨'),
+ (0x1E907, 'M', '𞤩'),
+ (0x1E908, 'M', '𞤪'),
+ (0x1E909, 'M', '𞤫'),
+ (0x1E90A, 'M', '𞤬'),
+ (0x1E90B, 'M', '𞤭'),
+ (0x1E90C, 'M', '𞤮'),
+ (0x1E90D, 'M', '𞤯'),
+ ]
+
+def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1E90E, 'M', '𞤰'),
+ (0x1E90F, 'M', '𞤱'),
+ (0x1E910, 'M', '𞤲'),
+ (0x1E911, 'M', '𞤳'),
+ (0x1E912, 'M', '𞤴'),
+ (0x1E913, 'M', '𞤵'),
+ (0x1E914, 'M', '𞤶'),
+ (0x1E915, 'M', '𞤷'),
+ (0x1E916, 'M', '𞤸'),
+ (0x1E917, 'M', '𞤹'),
+ (0x1E918, 'M', '𞤺'),
+ (0x1E919, 'M', '𞤻'),
+ (0x1E91A, 'M', '𞤼'),
+ (0x1E91B, 'M', '𞤽'),
+ (0x1E91C, 'M', '𞤾'),
+ (0x1E91D, 'M', '𞤿'),
+ (0x1E91E, 'M', '𞥀'),
+ (0x1E91F, 'M', '𞥁'),
+ (0x1E920, 'M', '𞥂'),
+ (0x1E921, 'M', '𞥃'),
+ (0x1E922, 'V'),
+ (0x1E94C, 'X'),
+ (0x1E950, 'V'),
+ (0x1E95A, 'X'),
+ (0x1E95E, 'V'),
+ (0x1E960, 'X'),
+ (0x1EC71, 'V'),
+ (0x1ECB5, 'X'),
+ (0x1ED01, 'V'),
+ (0x1ED3E, 'X'),
+ (0x1EE00, 'M', 'ا'),
+ (0x1EE01, 'M', 'ب'),
+ (0x1EE02, 'M', 'ج'),
+ (0x1EE03, 'M', 'د'),
+ (0x1EE04, 'X'),
+ (0x1EE05, 'M', 'و'),
+ (0x1EE06, 'M', 'ز'),
+ (0x1EE07, 'M', 'ح'),
+ (0x1EE08, 'M', 'ط'),
+ (0x1EE09, 'M', 'ي'),
+ (0x1EE0A, 'M', 'ك'),
+ (0x1EE0B, 'M', 'ل'),
+ (0x1EE0C, 'M', 'م'),
+ (0x1EE0D, 'M', 'ن'),
+ (0x1EE0E, 'M', 'س'),
+ (0x1EE0F, 'M', 'ع'),
+ (0x1EE10, 'M', 'ف'),
+ (0x1EE11, 'M', 'ص'),
+ (0x1EE12, 'M', 'ق'),
+ (0x1EE13, 'M', 'ر'),
+ (0x1EE14, 'M', 'ش'),
+ (0x1EE15, 'M', 'ت'),
+ (0x1EE16, 'M', 'ث'),
+ (0x1EE17, 'M', 'خ'),
+ (0x1EE18, 'M', 'ذ'),
+ (0x1EE19, 'M', 'ض'),
+ (0x1EE1A, 'M', 'ظ'),
+ (0x1EE1B, 'M', 'غ'),
+ (0x1EE1C, 'M', 'ٮ'),
+ (0x1EE1D, 'M', 'ں'),
+ (0x1EE1E, 'M', 'ڡ'),
+ (0x1EE1F, 'M', 'ٯ'),
+ (0x1EE20, 'X'),
+ (0x1EE21, 'M', 'ب'),
+ (0x1EE22, 'M', 'ج'),
+ (0x1EE23, 'X'),
+ (0x1EE24, 'M', 'ه'),
+ (0x1EE25, 'X'),
+ (0x1EE27, 'M', 'ح'),
+ (0x1EE28, 'X'),
+ (0x1EE29, 'M', 'ي'),
+ (0x1EE2A, 'M', 'ك'),
+ (0x1EE2B, 'M', 'ل'),
+ (0x1EE2C, 'M', 'م'),
+ (0x1EE2D, 'M', 'ن'),
+ (0x1EE2E, 'M', 'س'),
+ (0x1EE2F, 'M', 'ع'),
+ (0x1EE30, 'M', 'ف'),
+ (0x1EE31, 'M', 'ص'),
+ (0x1EE32, 'M', 'ق'),
+ (0x1EE33, 'X'),
+ (0x1EE34, 'M', 'ش'),
+ (0x1EE35, 'M', 'ت'),
+ (0x1EE36, 'M', 'ث'),
+ (0x1EE37, 'M', 'خ'),
+ (0x1EE38, 'X'),
+ (0x1EE39, 'M', 'ض'),
+ (0x1EE3A, 'X'),
+ (0x1EE3B, 'M', 'غ'),
+ (0x1EE3C, 'X'),
+ (0x1EE42, 'M', 'ج'),
+ (0x1EE43, 'X'),
+ (0x1EE47, 'M', 'ح'),
+ (0x1EE48, 'X'),
+ (0x1EE49, 'M', 'ي'),
+ (0x1EE4A, 'X'),
+ (0x1EE4B, 'M', 'ل'),
+ (0x1EE4C, 'X'),
+ (0x1EE4D, 'M', 'ن'),
+ (0x1EE4E, 'M', 'س'),
+ ]
+
+def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1EE4F, 'M', 'ع'),
+ (0x1EE50, 'X'),
+ (0x1EE51, 'M', 'ص'),
+ (0x1EE52, 'M', 'ق'),
+ (0x1EE53, 'X'),
+ (0x1EE54, 'M', 'ش'),
+ (0x1EE55, 'X'),
+ (0x1EE57, 'M', 'خ'),
+ (0x1EE58, 'X'),
+ (0x1EE59, 'M', 'ض'),
+ (0x1EE5A, 'X'),
+ (0x1EE5B, 'M', 'غ'),
+ (0x1EE5C, 'X'),
+ (0x1EE5D, 'M', 'ں'),
+ (0x1EE5E, 'X'),
+ (0x1EE5F, 'M', 'ٯ'),
+ (0x1EE60, 'X'),
+ (0x1EE61, 'M', 'ب'),
+ (0x1EE62, 'M', 'ج'),
+ (0x1EE63, 'X'),
+ (0x1EE64, 'M', 'ه'),
+ (0x1EE65, 'X'),
+ (0x1EE67, 'M', 'ح'),
+ (0x1EE68, 'M', 'ط'),
+ (0x1EE69, 'M', 'ي'),
+ (0x1EE6A, 'M', 'ك'),
+ (0x1EE6B, 'X'),
+ (0x1EE6C, 'M', 'م'),
+ (0x1EE6D, 'M', 'ن'),
+ (0x1EE6E, 'M', 'س'),
+ (0x1EE6F, 'M', 'ع'),
+ (0x1EE70, 'M', 'ف'),
+ (0x1EE71, 'M', 'ص'),
+ (0x1EE72, 'M', 'ق'),
+ (0x1EE73, 'X'),
+ (0x1EE74, 'M', 'ش'),
+ (0x1EE75, 'M', 'ت'),
+ (0x1EE76, 'M', 'ث'),
+ (0x1EE77, 'M', 'خ'),
+ (0x1EE78, 'X'),
+ (0x1EE79, 'M', 'ض'),
+ (0x1EE7A, 'M', 'ظ'),
+ (0x1EE7B, 'M', 'غ'),
+ (0x1EE7C, 'M', 'ٮ'),
+ (0x1EE7D, 'X'),
+ (0x1EE7E, 'M', 'ڡ'),
+ (0x1EE7F, 'X'),
+ (0x1EE80, 'M', 'ا'),
+ (0x1EE81, 'M', 'ب'),
+ (0x1EE82, 'M', 'ج'),
+ (0x1EE83, 'M', 'د'),
+ (0x1EE84, 'M', 'ه'),
+ (0x1EE85, 'M', 'و'),
+ (0x1EE86, 'M', 'ز'),
+ (0x1EE87, 'M', 'ح'),
+ (0x1EE88, 'M', 'ط'),
+ (0x1EE89, 'M', 'ي'),
+ (0x1EE8A, 'X'),
+ (0x1EE8B, 'M', 'ل'),
+ (0x1EE8C, 'M', 'م'),
+ (0x1EE8D, 'M', 'ن'),
+ (0x1EE8E, 'M', 'س'),
+ (0x1EE8F, 'M', 'ع'),
+ (0x1EE90, 'M', 'ف'),
+ (0x1EE91, 'M', 'ص'),
+ (0x1EE92, 'M', 'ق'),
+ (0x1EE93, 'M', 'ر'),
+ (0x1EE94, 'M', 'ش'),
+ (0x1EE95, 'M', 'ت'),
+ (0x1EE96, 'M', 'ث'),
+ (0x1EE97, 'M', 'خ'),
+ (0x1EE98, 'M', 'ذ'),
+ (0x1EE99, 'M', 'ض'),
+ (0x1EE9A, 'M', 'ظ'),
+ (0x1EE9B, 'M', 'غ'),
+ (0x1EE9C, 'X'),
+ (0x1EEA1, 'M', 'ب'),
+ (0x1EEA2, 'M', 'ج'),
+ (0x1EEA3, 'M', 'د'),
+ (0x1EEA4, 'X'),
+ (0x1EEA5, 'M', 'و'),
+ (0x1EEA6, 'M', 'ز'),
+ (0x1EEA7, 'M', 'ح'),
+ (0x1EEA8, 'M', 'ط'),
+ (0x1EEA9, 'M', 'ي'),
+ (0x1EEAA, 'X'),
+ (0x1EEAB, 'M', 'ل'),
+ (0x1EEAC, 'M', 'م'),
+ (0x1EEAD, 'M', 'ن'),
+ (0x1EEAE, 'M', 'س'),
+ (0x1EEAF, 'M', 'ع'),
+ (0x1EEB0, 'M', 'ف'),
+ (0x1EEB1, 'M', 'ص'),
+ (0x1EEB2, 'M', 'ق'),
+ (0x1EEB3, 'M', 'ر'),
+ (0x1EEB4, 'M', 'ش'),
+ (0x1EEB5, 'M', 'ت'),
+ (0x1EEB6, 'M', 'ث'),
+ (0x1EEB7, 'M', 'خ'),
+ (0x1EEB8, 'M', 'ذ'),
+ ]
+
+def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1EEB9, 'M', 'ض'),
+ (0x1EEBA, 'M', 'ظ'),
+ (0x1EEBB, 'M', 'غ'),
+ (0x1EEBC, 'X'),
+ (0x1EEF0, 'V'),
+ (0x1EEF2, 'X'),
+ (0x1F000, 'V'),
+ (0x1F02C, 'X'),
+ (0x1F030, 'V'),
+ (0x1F094, 'X'),
+ (0x1F0A0, 'V'),
+ (0x1F0AF, 'X'),
+ (0x1F0B1, 'V'),
+ (0x1F0C0, 'X'),
+ (0x1F0C1, 'V'),
+ (0x1F0D0, 'X'),
+ (0x1F0D1, 'V'),
+ (0x1F0F6, 'X'),
+ (0x1F101, '3', '0,'),
+ (0x1F102, '3', '1,'),
+ (0x1F103, '3', '2,'),
+ (0x1F104, '3', '3,'),
+ (0x1F105, '3', '4,'),
+ (0x1F106, '3', '5,'),
+ (0x1F107, '3', '6,'),
+ (0x1F108, '3', '7,'),
+ (0x1F109, '3', '8,'),
+ (0x1F10A, '3', '9,'),
+ (0x1F10B, 'V'),
+ (0x1F110, '3', '(a)'),
+ (0x1F111, '3', '(b)'),
+ (0x1F112, '3', '(c)'),
+ (0x1F113, '3', '(d)'),
+ (0x1F114, '3', '(e)'),
+ (0x1F115, '3', '(f)'),
+ (0x1F116, '3', '(g)'),
+ (0x1F117, '3', '(h)'),
+ (0x1F118, '3', '(i)'),
+ (0x1F119, '3', '(j)'),
+ (0x1F11A, '3', '(k)'),
+ (0x1F11B, '3', '(l)'),
+ (0x1F11C, '3', '(m)'),
+ (0x1F11D, '3', '(n)'),
+ (0x1F11E, '3', '(o)'),
+ (0x1F11F, '3', '(p)'),
+ (0x1F120, '3', '(q)'),
+ (0x1F121, '3', '(r)'),
+ (0x1F122, '3', '(s)'),
+ (0x1F123, '3', '(t)'),
+ (0x1F124, '3', '(u)'),
+ (0x1F125, '3', '(v)'),
+ (0x1F126, '3', '(w)'),
+ (0x1F127, '3', '(x)'),
+ (0x1F128, '3', '(y)'),
+ (0x1F129, '3', '(z)'),
+ (0x1F12A, 'M', '〔s〕'),
+ (0x1F12B, 'M', 'c'),
+ (0x1F12C, 'M', 'r'),
+ (0x1F12D, 'M', 'cd'),
+ (0x1F12E, 'M', 'wz'),
+ (0x1F12F, 'V'),
+ (0x1F130, 'M', 'a'),
+ (0x1F131, 'M', 'b'),
+ (0x1F132, 'M', 'c'),
+ (0x1F133, 'M', 'd'),
+ (0x1F134, 'M', 'e'),
+ (0x1F135, 'M', 'f'),
+ (0x1F136, 'M', 'g'),
+ (0x1F137, 'M', 'h'),
+ (0x1F138, 'M', 'i'),
+ (0x1F139, 'M', 'j'),
+ (0x1F13A, 'M', 'k'),
+ (0x1F13B, 'M', 'l'),
+ (0x1F13C, 'M', 'm'),
+ (0x1F13D, 'M', 'n'),
+ (0x1F13E, 'M', 'o'),
+ (0x1F13F, 'M', 'p'),
+ (0x1F140, 'M', 'q'),
+ (0x1F141, 'M', 'r'),
+ (0x1F142, 'M', 's'),
+ (0x1F143, 'M', 't'),
+ (0x1F144, 'M', 'u'),
+ (0x1F145, 'M', 'v'),
+ (0x1F146, 'M', 'w'),
+ (0x1F147, 'M', 'x'),
+ (0x1F148, 'M', 'y'),
+ (0x1F149, 'M', 'z'),
+ (0x1F14A, 'M', 'hv'),
+ (0x1F14B, 'M', 'mv'),
+ (0x1F14C, 'M', 'sd'),
+ (0x1F14D, 'M', 'ss'),
+ (0x1F14E, 'M', 'ppv'),
+ (0x1F14F, 'M', 'wc'),
+ (0x1F150, 'V'),
+ (0x1F16A, 'M', 'mc'),
+ (0x1F16B, 'M', 'md'),
+ (0x1F16C, 'M', 'mr'),
+ (0x1F16D, 'V'),
+ (0x1F190, 'M', 'dj'),
+ (0x1F191, 'V'),
+ ]
+
+def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1F1AE, 'X'),
+ (0x1F1E6, 'V'),
+ (0x1F200, 'M', 'ほか'),
+ (0x1F201, 'M', 'ココ'),
+ (0x1F202, 'M', 'サ'),
+ (0x1F203, 'X'),
+ (0x1F210, 'M', '手'),
+ (0x1F211, 'M', '字'),
+ (0x1F212, 'M', '双'),
+ (0x1F213, 'M', 'デ'),
+ (0x1F214, 'M', '二'),
+ (0x1F215, 'M', '多'),
+ (0x1F216, 'M', '解'),
+ (0x1F217, 'M', '天'),
+ (0x1F218, 'M', '交'),
+ (0x1F219, 'M', '映'),
+ (0x1F21A, 'M', '無'),
+ (0x1F21B, 'M', '料'),
+ (0x1F21C, 'M', '前'),
+ (0x1F21D, 'M', '後'),
+ (0x1F21E, 'M', '再'),
+ (0x1F21F, 'M', '新'),
+ (0x1F220, 'M', '初'),
+ (0x1F221, 'M', '終'),
+ (0x1F222, 'M', '生'),
+ (0x1F223, 'M', '販'),
+ (0x1F224, 'M', '声'),
+ (0x1F225, 'M', '吹'),
+ (0x1F226, 'M', '演'),
+ (0x1F227, 'M', '投'),
+ (0x1F228, 'M', '捕'),
+ (0x1F229, 'M', '一'),
+ (0x1F22A, 'M', '三'),
+ (0x1F22B, 'M', '遊'),
+ (0x1F22C, 'M', '左'),
+ (0x1F22D, 'M', '中'),
+ (0x1F22E, 'M', '右'),
+ (0x1F22F, 'M', '指'),
+ (0x1F230, 'M', '走'),
+ (0x1F231, 'M', '打'),
+ (0x1F232, 'M', '禁'),
+ (0x1F233, 'M', '空'),
+ (0x1F234, 'M', '合'),
+ (0x1F235, 'M', '満'),
+ (0x1F236, 'M', '有'),
+ (0x1F237, 'M', '月'),
+ (0x1F238, 'M', '申'),
+ (0x1F239, 'M', '割'),
+ (0x1F23A, 'M', '営'),
+ (0x1F23B, 'M', '配'),
+ (0x1F23C, 'X'),
+ (0x1F240, 'M', '〔本〕'),
+ (0x1F241, 'M', '〔三〕'),
+ (0x1F242, 'M', '〔二〕'),
+ (0x1F243, 'M', '〔安〕'),
+ (0x1F244, 'M', '〔点〕'),
+ (0x1F245, 'M', '〔打〕'),
+ (0x1F246, 'M', '〔盗〕'),
+ (0x1F247, 'M', '〔勝〕'),
+ (0x1F248, 'M', '〔敗〕'),
+ (0x1F249, 'X'),
+ (0x1F250, 'M', '得'),
+ (0x1F251, 'M', '可'),
+ (0x1F252, 'X'),
+ (0x1F260, 'V'),
+ (0x1F266, 'X'),
+ (0x1F300, 'V'),
+ (0x1F6D8, 'X'),
+ (0x1F6DC, 'V'),
+ (0x1F6ED, 'X'),
+ (0x1F6F0, 'V'),
+ (0x1F6FD, 'X'),
+ (0x1F700, 'V'),
+ (0x1F777, 'X'),
+ (0x1F77B, 'V'),
+ (0x1F7DA, 'X'),
+ (0x1F7E0, 'V'),
+ (0x1F7EC, 'X'),
+ (0x1F7F0, 'V'),
+ (0x1F7F1, 'X'),
+ (0x1F800, 'V'),
+ (0x1F80C, 'X'),
+ (0x1F810, 'V'),
+ (0x1F848, 'X'),
+ (0x1F850, 'V'),
+ (0x1F85A, 'X'),
+ (0x1F860, 'V'),
+ (0x1F888, 'X'),
+ (0x1F890, 'V'),
+ (0x1F8AE, 'X'),
+ (0x1F8B0, 'V'),
+ (0x1F8B2, 'X'),
+ (0x1F900, 'V'),
+ (0x1FA54, 'X'),
+ (0x1FA60, 'V'),
+ (0x1FA6E, 'X'),
+ (0x1FA70, 'V'),
+ (0x1FA7D, 'X'),
+ (0x1FA80, 'V'),
+ (0x1FA89, 'X'),
+ ]
+
+def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1FA90, 'V'),
+ (0x1FABE, 'X'),
+ (0x1FABF, 'V'),
+ (0x1FAC6, 'X'),
+ (0x1FACE, 'V'),
+ (0x1FADC, 'X'),
+ (0x1FAE0, 'V'),
+ (0x1FAE9, 'X'),
+ (0x1FAF0, 'V'),
+ (0x1FAF9, 'X'),
+ (0x1FB00, 'V'),
+ (0x1FB93, 'X'),
+ (0x1FB94, 'V'),
+ (0x1FBCB, 'X'),
+ (0x1FBF0, 'M', '0'),
+ (0x1FBF1, 'M', '1'),
+ (0x1FBF2, 'M', '2'),
+ (0x1FBF3, 'M', '3'),
+ (0x1FBF4, 'M', '4'),
+ (0x1FBF5, 'M', '5'),
+ (0x1FBF6, 'M', '6'),
+ (0x1FBF7, 'M', '7'),
+ (0x1FBF8, 'M', '8'),
+ (0x1FBF9, 'M', '9'),
+ (0x1FBFA, 'X'),
+ (0x20000, 'V'),
+ (0x2A6E0, 'X'),
+ (0x2A700, 'V'),
+ (0x2B73A, 'X'),
+ (0x2B740, 'V'),
+ (0x2B81E, 'X'),
+ (0x2B820, 'V'),
+ (0x2CEA2, 'X'),
+ (0x2CEB0, 'V'),
+ (0x2EBE1, 'X'),
+ (0x2EBF0, 'V'),
+ (0x2EE5E, 'X'),
+ (0x2F800, 'M', '丽'),
+ (0x2F801, 'M', '丸'),
+ (0x2F802, 'M', '乁'),
+ (0x2F803, 'M', '𠄢'),
+ (0x2F804, 'M', '你'),
+ (0x2F805, 'M', '侮'),
+ (0x2F806, 'M', '侻'),
+ (0x2F807, 'M', '倂'),
+ (0x2F808, 'M', '偺'),
+ (0x2F809, 'M', '備'),
+ (0x2F80A, 'M', '僧'),
+ (0x2F80B, 'M', '像'),
+ (0x2F80C, 'M', '㒞'),
+ (0x2F80D, 'M', '𠘺'),
+ (0x2F80E, 'M', '免'),
+ (0x2F80F, 'M', '兔'),
+ (0x2F810, 'M', '兤'),
+ (0x2F811, 'M', '具'),
+ (0x2F812, 'M', '𠔜'),
+ (0x2F813, 'M', '㒹'),
+ (0x2F814, 'M', '內'),
+ (0x2F815, 'M', '再'),
+ (0x2F816, 'M', '𠕋'),
+ (0x2F817, 'M', '冗'),
+ (0x2F818, 'M', '冤'),
+ (0x2F819, 'M', '仌'),
+ (0x2F81A, 'M', '冬'),
+ (0x2F81B, 'M', '况'),
+ (0x2F81C, 'M', '𩇟'),
+ (0x2F81D, 'M', '凵'),
+ (0x2F81E, 'M', '刃'),
+ (0x2F81F, 'M', '㓟'),
+ (0x2F820, 'M', '刻'),
+ (0x2F821, 'M', '剆'),
+ (0x2F822, 'M', '割'),
+ (0x2F823, 'M', '剷'),
+ (0x2F824, 'M', '㔕'),
+ (0x2F825, 'M', '勇'),
+ (0x2F826, 'M', '勉'),
+ (0x2F827, 'M', '勤'),
+ (0x2F828, 'M', '勺'),
+ (0x2F829, 'M', '包'),
+ (0x2F82A, 'M', '匆'),
+ (0x2F82B, 'M', '北'),
+ (0x2F82C, 'M', '卉'),
+ (0x2F82D, 'M', '卑'),
+ (0x2F82E, 'M', '博'),
+ (0x2F82F, 'M', '即'),
+ (0x2F830, 'M', '卽'),
+ (0x2F831, 'M', '卿'),
+ (0x2F834, 'M', '𠨬'),
+ (0x2F835, 'M', '灰'),
+ (0x2F836, 'M', '及'),
+ (0x2F837, 'M', '叟'),
+ (0x2F838, 'M', '𠭣'),
+ (0x2F839, 'M', '叫'),
+ (0x2F83A, 'M', '叱'),
+ (0x2F83B, 'M', '吆'),
+ (0x2F83C, 'M', '咞'),
+ (0x2F83D, 'M', '吸'),
+ (0x2F83E, 'M', '呈'),
+ (0x2F83F, 'M', '周'),
+ (0x2F840, 'M', '咢'),
+ ]
+
+def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F841, 'M', '哶'),
+ (0x2F842, 'M', '唐'),
+ (0x2F843, 'M', '啓'),
+ (0x2F844, 'M', '啣'),
+ (0x2F845, 'M', '善'),
+ (0x2F847, 'M', '喙'),
+ (0x2F848, 'M', '喫'),
+ (0x2F849, 'M', '喳'),
+ (0x2F84A, 'M', '嗂'),
+ (0x2F84B, 'M', '圖'),
+ (0x2F84C, 'M', '嘆'),
+ (0x2F84D, 'M', '圗'),
+ (0x2F84E, 'M', '噑'),
+ (0x2F84F, 'M', '噴'),
+ (0x2F850, 'M', '切'),
+ (0x2F851, 'M', '壮'),
+ (0x2F852, 'M', '城'),
+ (0x2F853, 'M', '埴'),
+ (0x2F854, 'M', '堍'),
+ (0x2F855, 'M', '型'),
+ (0x2F856, 'M', '堲'),
+ (0x2F857, 'M', '報'),
+ (0x2F858, 'M', '墬'),
+ (0x2F859, 'M', '𡓤'),
+ (0x2F85A, 'M', '売'),
+ (0x2F85B, 'M', '壷'),
+ (0x2F85C, 'M', '夆'),
+ (0x2F85D, 'M', '多'),
+ (0x2F85E, 'M', '夢'),
+ (0x2F85F, 'M', '奢'),
+ (0x2F860, 'M', '𡚨'),
+ (0x2F861, 'M', '𡛪'),
+ (0x2F862, 'M', '姬'),
+ (0x2F863, 'M', '娛'),
+ (0x2F864, 'M', '娧'),
+ (0x2F865, 'M', '姘'),
+ (0x2F866, 'M', '婦'),
+ (0x2F867, 'M', '㛮'),
+ (0x2F868, 'X'),
+ (0x2F869, 'M', '嬈'),
+ (0x2F86A, 'M', '嬾'),
+ (0x2F86C, 'M', '𡧈'),
+ (0x2F86D, 'M', '寃'),
+ (0x2F86E, 'M', '寘'),
+ (0x2F86F, 'M', '寧'),
+ (0x2F870, 'M', '寳'),
+ (0x2F871, 'M', '𡬘'),
+ (0x2F872, 'M', '寿'),
+ (0x2F873, 'M', '将'),
+ (0x2F874, 'X'),
+ (0x2F875, 'M', '尢'),
+ (0x2F876, 'M', '㞁'),
+ (0x2F877, 'M', '屠'),
+ (0x2F878, 'M', '屮'),
+ (0x2F879, 'M', '峀'),
+ (0x2F87A, 'M', '岍'),
+ (0x2F87B, 'M', '𡷤'),
+ (0x2F87C, 'M', '嵃'),
+ (0x2F87D, 'M', '𡷦'),
+ (0x2F87E, 'M', '嵮'),
+ (0x2F87F, 'M', '嵫'),
+ (0x2F880, 'M', '嵼'),
+ (0x2F881, 'M', '巡'),
+ (0x2F882, 'M', '巢'),
+ (0x2F883, 'M', '㠯'),
+ (0x2F884, 'M', '巽'),
+ (0x2F885, 'M', '帨'),
+ (0x2F886, 'M', '帽'),
+ (0x2F887, 'M', '幩'),
+ (0x2F888, 'M', '㡢'),
+ (0x2F889, 'M', '𢆃'),
+ (0x2F88A, 'M', '㡼'),
+ (0x2F88B, 'M', '庰'),
+ (0x2F88C, 'M', '庳'),
+ (0x2F88D, 'M', '庶'),
+ (0x2F88E, 'M', '廊'),
+ (0x2F88F, 'M', '𪎒'),
+ (0x2F890, 'M', '廾'),
+ (0x2F891, 'M', '𢌱'),
+ (0x2F893, 'M', '舁'),
+ (0x2F894, 'M', '弢'),
+ (0x2F896, 'M', '㣇'),
+ (0x2F897, 'M', '𣊸'),
+ (0x2F898, 'M', '𦇚'),
+ (0x2F899, 'M', '形'),
+ (0x2F89A, 'M', '彫'),
+ (0x2F89B, 'M', '㣣'),
+ (0x2F89C, 'M', '徚'),
+ (0x2F89D, 'M', '忍'),
+ (0x2F89E, 'M', '志'),
+ (0x2F89F, 'M', '忹'),
+ (0x2F8A0, 'M', '悁'),
+ (0x2F8A1, 'M', '㤺'),
+ (0x2F8A2, 'M', '㤜'),
+ (0x2F8A3, 'M', '悔'),
+ (0x2F8A4, 'M', '𢛔'),
+ (0x2F8A5, 'M', '惇'),
+ (0x2F8A6, 'M', '慈'),
+ (0x2F8A7, 'M', '慌'),
+ (0x2F8A8, 'M', '慎'),
+ ]
+
+def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F8A9, 'M', '慌'),
+ (0x2F8AA, 'M', '慺'),
+ (0x2F8AB, 'M', '憎'),
+ (0x2F8AC, 'M', '憲'),
+ (0x2F8AD, 'M', '憤'),
+ (0x2F8AE, 'M', '憯'),
+ (0x2F8AF, 'M', '懞'),
+ (0x2F8B0, 'M', '懲'),
+ (0x2F8B1, 'M', '懶'),
+ (0x2F8B2, 'M', '成'),
+ (0x2F8B3, 'M', '戛'),
+ (0x2F8B4, 'M', '扝'),
+ (0x2F8B5, 'M', '抱'),
+ (0x2F8B6, 'M', '拔'),
+ (0x2F8B7, 'M', '捐'),
+ (0x2F8B8, 'M', '𢬌'),
+ (0x2F8B9, 'M', '挽'),
+ (0x2F8BA, 'M', '拼'),
+ (0x2F8BB, 'M', '捨'),
+ (0x2F8BC, 'M', '掃'),
+ (0x2F8BD, 'M', '揤'),
+ (0x2F8BE, 'M', '𢯱'),
+ (0x2F8BF, 'M', '搢'),
+ (0x2F8C0, 'M', '揅'),
+ (0x2F8C1, 'M', '掩'),
+ (0x2F8C2, 'M', '㨮'),
+ (0x2F8C3, 'M', '摩'),
+ (0x2F8C4, 'M', '摾'),
+ (0x2F8C5, 'M', '撝'),
+ (0x2F8C6, 'M', '摷'),
+ (0x2F8C7, 'M', '㩬'),
+ (0x2F8C8, 'M', '敏'),
+ (0x2F8C9, 'M', '敬'),
+ (0x2F8CA, 'M', '𣀊'),
+ (0x2F8CB, 'M', '旣'),
+ (0x2F8CC, 'M', '書'),
+ (0x2F8CD, 'M', '晉'),
+ (0x2F8CE, 'M', '㬙'),
+ (0x2F8CF, 'M', '暑'),
+ (0x2F8D0, 'M', '㬈'),
+ (0x2F8D1, 'M', '㫤'),
+ (0x2F8D2, 'M', '冒'),
+ (0x2F8D3, 'M', '冕'),
+ (0x2F8D4, 'M', '最'),
+ (0x2F8D5, 'M', '暜'),
+ (0x2F8D6, 'M', '肭'),
+ (0x2F8D7, 'M', '䏙'),
+ (0x2F8D8, 'M', '朗'),
+ (0x2F8D9, 'M', '望'),
+ (0x2F8DA, 'M', '朡'),
+ (0x2F8DB, 'M', '杞'),
+ (0x2F8DC, 'M', '杓'),
+ (0x2F8DD, 'M', '𣏃'),
+ (0x2F8DE, 'M', '㭉'),
+ (0x2F8DF, 'M', '柺'),
+ (0x2F8E0, 'M', '枅'),
+ (0x2F8E1, 'M', '桒'),
+ (0x2F8E2, 'M', '梅'),
+ (0x2F8E3, 'M', '𣑭'),
+ (0x2F8E4, 'M', '梎'),
+ (0x2F8E5, 'M', '栟'),
+ (0x2F8E6, 'M', '椔'),
+ (0x2F8E7, 'M', '㮝'),
+ (0x2F8E8, 'M', '楂'),
+ (0x2F8E9, 'M', '榣'),
+ (0x2F8EA, 'M', '槪'),
+ (0x2F8EB, 'M', '檨'),
+ (0x2F8EC, 'M', '𣚣'),
+ (0x2F8ED, 'M', '櫛'),
+ (0x2F8EE, 'M', '㰘'),
+ (0x2F8EF, 'M', '次'),
+ (0x2F8F0, 'M', '𣢧'),
+ (0x2F8F1, 'M', '歔'),
+ (0x2F8F2, 'M', '㱎'),
+ (0x2F8F3, 'M', '歲'),
+ (0x2F8F4, 'M', '殟'),
+ (0x2F8F5, 'M', '殺'),
+ (0x2F8F6, 'M', '殻'),
+ (0x2F8F7, 'M', '𣪍'),
+ (0x2F8F8, 'M', '𡴋'),
+ (0x2F8F9, 'M', '𣫺'),
+ (0x2F8FA, 'M', '汎'),
+ (0x2F8FB, 'M', '𣲼'),
+ (0x2F8FC, 'M', '沿'),
+ (0x2F8FD, 'M', '泍'),
+ (0x2F8FE, 'M', '汧'),
+ (0x2F8FF, 'M', '洖'),
+ (0x2F900, 'M', '派'),
+ (0x2F901, 'M', '海'),
+ (0x2F902, 'M', '流'),
+ (0x2F903, 'M', '浩'),
+ (0x2F904, 'M', '浸'),
+ (0x2F905, 'M', '涅'),
+ (0x2F906, 'M', '𣴞'),
+ (0x2F907, 'M', '洴'),
+ (0x2F908, 'M', '港'),
+ (0x2F909, 'M', '湮'),
+ (0x2F90A, 'M', '㴳'),
+ (0x2F90B, 'M', '滋'),
+ (0x2F90C, 'M', '滇'),
+ ]
+
+def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F90D, 'M', '𣻑'),
+ (0x2F90E, 'M', '淹'),
+ (0x2F90F, 'M', '潮'),
+ (0x2F910, 'M', '𣽞'),
+ (0x2F911, 'M', '𣾎'),
+ (0x2F912, 'M', '濆'),
+ (0x2F913, 'M', '瀹'),
+ (0x2F914, 'M', '瀞'),
+ (0x2F915, 'M', '瀛'),
+ (0x2F916, 'M', '㶖'),
+ (0x2F917, 'M', '灊'),
+ (0x2F918, 'M', '災'),
+ (0x2F919, 'M', '灷'),
+ (0x2F91A, 'M', '炭'),
+ (0x2F91B, 'M', '𠔥'),
+ (0x2F91C, 'M', '煅'),
+ (0x2F91D, 'M', '𤉣'),
+ (0x2F91E, 'M', '熜'),
+ (0x2F91F, 'X'),
+ (0x2F920, 'M', '爨'),
+ (0x2F921, 'M', '爵'),
+ (0x2F922, 'M', '牐'),
+ (0x2F923, 'M', '𤘈'),
+ (0x2F924, 'M', '犀'),
+ (0x2F925, 'M', '犕'),
+ (0x2F926, 'M', '𤜵'),
+ (0x2F927, 'M', '𤠔'),
+ (0x2F928, 'M', '獺'),
+ (0x2F929, 'M', '王'),
+ (0x2F92A, 'M', '㺬'),
+ (0x2F92B, 'M', '玥'),
+ (0x2F92C, 'M', '㺸'),
+ (0x2F92E, 'M', '瑇'),
+ (0x2F92F, 'M', '瑜'),
+ (0x2F930, 'M', '瑱'),
+ (0x2F931, 'M', '璅'),
+ (0x2F932, 'M', '瓊'),
+ (0x2F933, 'M', '㼛'),
+ (0x2F934, 'M', '甤'),
+ (0x2F935, 'M', '𤰶'),
+ (0x2F936, 'M', '甾'),
+ (0x2F937, 'M', '𤲒'),
+ (0x2F938, 'M', '異'),
+ (0x2F939, 'M', '𢆟'),
+ (0x2F93A, 'M', '瘐'),
+ (0x2F93B, 'M', '𤾡'),
+ (0x2F93C, 'M', '𤾸'),
+ (0x2F93D, 'M', '𥁄'),
+ (0x2F93E, 'M', '㿼'),
+ (0x2F93F, 'M', '䀈'),
+ (0x2F940, 'M', '直'),
+ (0x2F941, 'M', '𥃳'),
+ (0x2F942, 'M', '𥃲'),
+ (0x2F943, 'M', '𥄙'),
+ (0x2F944, 'M', '𥄳'),
+ (0x2F945, 'M', '眞'),
+ (0x2F946, 'M', '真'),
+ (0x2F948, 'M', '睊'),
+ (0x2F949, 'M', '䀹'),
+ (0x2F94A, 'M', '瞋'),
+ (0x2F94B, 'M', '䁆'),
+ (0x2F94C, 'M', '䂖'),
+ (0x2F94D, 'M', '𥐝'),
+ (0x2F94E, 'M', '硎'),
+ (0x2F94F, 'M', '碌'),
+ (0x2F950, 'M', '磌'),
+ (0x2F951, 'M', '䃣'),
+ (0x2F952, 'M', '𥘦'),
+ (0x2F953, 'M', '祖'),
+ (0x2F954, 'M', '𥚚'),
+ (0x2F955, 'M', '𥛅'),
+ (0x2F956, 'M', '福'),
+ (0x2F957, 'M', '秫'),
+ (0x2F958, 'M', '䄯'),
+ (0x2F959, 'M', '穀'),
+ (0x2F95A, 'M', '穊'),
+ (0x2F95B, 'M', '穏'),
+ (0x2F95C, 'M', '𥥼'),
+ (0x2F95D, 'M', '𥪧'),
+ (0x2F95F, 'X'),
+ (0x2F960, 'M', '䈂'),
+ (0x2F961, 'M', '𥮫'),
+ (0x2F962, 'M', '篆'),
+ (0x2F963, 'M', '築'),
+ (0x2F964, 'M', '䈧'),
+ (0x2F965, 'M', '𥲀'),
+ (0x2F966, 'M', '糒'),
+ (0x2F967, 'M', '䊠'),
+ (0x2F968, 'M', '糨'),
+ (0x2F969, 'M', '糣'),
+ (0x2F96A, 'M', '紀'),
+ (0x2F96B, 'M', '𥾆'),
+ (0x2F96C, 'M', '絣'),
+ (0x2F96D, 'M', '䌁'),
+ (0x2F96E, 'M', '緇'),
+ (0x2F96F, 'M', '縂'),
+ (0x2F970, 'M', '繅'),
+ (0x2F971, 'M', '䌴'),
+ (0x2F972, 'M', '𦈨'),
+ (0x2F973, 'M', '𦉇'),
+ ]
+
+def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F974, 'M', '䍙'),
+ (0x2F975, 'M', '𦋙'),
+ (0x2F976, 'M', '罺'),
+ (0x2F977, 'M', '𦌾'),
+ (0x2F978, 'M', '羕'),
+ (0x2F979, 'M', '翺'),
+ (0x2F97A, 'M', '者'),
+ (0x2F97B, 'M', '𦓚'),
+ (0x2F97C, 'M', '𦔣'),
+ (0x2F97D, 'M', '聠'),
+ (0x2F97E, 'M', '𦖨'),
+ (0x2F97F, 'M', '聰'),
+ (0x2F980, 'M', '𣍟'),
+ (0x2F981, 'M', '䏕'),
+ (0x2F982, 'M', '育'),
+ (0x2F983, 'M', '脃'),
+ (0x2F984, 'M', '䐋'),
+ (0x2F985, 'M', '脾'),
+ (0x2F986, 'M', '媵'),
+ (0x2F987, 'M', '𦞧'),
+ (0x2F988, 'M', '𦞵'),
+ (0x2F989, 'M', '𣎓'),
+ (0x2F98A, 'M', '𣎜'),
+ (0x2F98B, 'M', '舁'),
+ (0x2F98C, 'M', '舄'),
+ (0x2F98D, 'M', '辞'),
+ (0x2F98E, 'M', '䑫'),
+ (0x2F98F, 'M', '芑'),
+ (0x2F990, 'M', '芋'),
+ (0x2F991, 'M', '芝'),
+ (0x2F992, 'M', '劳'),
+ (0x2F993, 'M', '花'),
+ (0x2F994, 'M', '芳'),
+ (0x2F995, 'M', '芽'),
+ (0x2F996, 'M', '苦'),
+ (0x2F997, 'M', '𦬼'),
+ (0x2F998, 'M', '若'),
+ (0x2F999, 'M', '茝'),
+ (0x2F99A, 'M', '荣'),
+ (0x2F99B, 'M', '莭'),
+ (0x2F99C, 'M', '茣'),
+ (0x2F99D, 'M', '莽'),
+ (0x2F99E, 'M', '菧'),
+ (0x2F99F, 'M', '著'),
+ (0x2F9A0, 'M', '荓'),
+ (0x2F9A1, 'M', '菊'),
+ (0x2F9A2, 'M', '菌'),
+ (0x2F9A3, 'M', '菜'),
+ (0x2F9A4, 'M', '𦰶'),
+ (0x2F9A5, 'M', '𦵫'),
+ (0x2F9A6, 'M', '𦳕'),
+ (0x2F9A7, 'M', '䔫'),
+ (0x2F9A8, 'M', '蓱'),
+ (0x2F9A9, 'M', '蓳'),
+ (0x2F9AA, 'M', '蔖'),
+ (0x2F9AB, 'M', '𧏊'),
+ (0x2F9AC, 'M', '蕤'),
+ (0x2F9AD, 'M', '𦼬'),
+ (0x2F9AE, 'M', '䕝'),
+ (0x2F9AF, 'M', '䕡'),
+ (0x2F9B0, 'M', '𦾱'),
+ (0x2F9B1, 'M', '𧃒'),
+ (0x2F9B2, 'M', '䕫'),
+ (0x2F9B3, 'M', '虐'),
+ (0x2F9B4, 'M', '虜'),
+ (0x2F9B5, 'M', '虧'),
+ (0x2F9B6, 'M', '虩'),
+ (0x2F9B7, 'M', '蚩'),
+ (0x2F9B8, 'M', '蚈'),
+ (0x2F9B9, 'M', '蜎'),
+ (0x2F9BA, 'M', '蛢'),
+ (0x2F9BB, 'M', '蝹'),
+ (0x2F9BC, 'M', '蜨'),
+ (0x2F9BD, 'M', '蝫'),
+ (0x2F9BE, 'M', '螆'),
+ (0x2F9BF, 'X'),
+ (0x2F9C0, 'M', '蟡'),
+ (0x2F9C1, 'M', '蠁'),
+ (0x2F9C2, 'M', '䗹'),
+ (0x2F9C3, 'M', '衠'),
+ (0x2F9C4, 'M', '衣'),
+ (0x2F9C5, 'M', '𧙧'),
+ (0x2F9C6, 'M', '裗'),
+ (0x2F9C7, 'M', '裞'),
+ (0x2F9C8, 'M', '䘵'),
+ (0x2F9C9, 'M', '裺'),
+ (0x2F9CA, 'M', '㒻'),
+ (0x2F9CB, 'M', '𧢮'),
+ (0x2F9CC, 'M', '𧥦'),
+ (0x2F9CD, 'M', '䚾'),
+ (0x2F9CE, 'M', '䛇'),
+ (0x2F9CF, 'M', '誠'),
+ (0x2F9D0, 'M', '諭'),
+ (0x2F9D1, 'M', '變'),
+ (0x2F9D2, 'M', '豕'),
+ (0x2F9D3, 'M', '𧲨'),
+ (0x2F9D4, 'M', '貫'),
+ (0x2F9D5, 'M', '賁'),
+ (0x2F9D6, 'M', '贛'),
+ (0x2F9D7, 'M', '起'),
+ ]
+
+def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F9D8, 'M', '𧼯'),
+ (0x2F9D9, 'M', '𠠄'),
+ (0x2F9DA, 'M', '跋'),
+ (0x2F9DB, 'M', '趼'),
+ (0x2F9DC, 'M', '跰'),
+ (0x2F9DD, 'M', '𠣞'),
+ (0x2F9DE, 'M', '軔'),
+ (0x2F9DF, 'M', '輸'),
+ (0x2F9E0, 'M', '𨗒'),
+ (0x2F9E1, 'M', '𨗭'),
+ (0x2F9E2, 'M', '邔'),
+ (0x2F9E3, 'M', '郱'),
+ (0x2F9E4, 'M', '鄑'),
+ (0x2F9E5, 'M', '𨜮'),
+ (0x2F9E6, 'M', '鄛'),
+ (0x2F9E7, 'M', '鈸'),
+ (0x2F9E8, 'M', '鋗'),
+ (0x2F9E9, 'M', '鋘'),
+ (0x2F9EA, 'M', '鉼'),
+ (0x2F9EB, 'M', '鏹'),
+ (0x2F9EC, 'M', '鐕'),
+ (0x2F9ED, 'M', '𨯺'),
+ (0x2F9EE, 'M', '開'),
+ (0x2F9EF, 'M', '䦕'),
+ (0x2F9F0, 'M', '閷'),
+ (0x2F9F1, 'M', '𨵷'),
+ (0x2F9F2, 'M', '䧦'),
+ (0x2F9F3, 'M', '雃'),
+ (0x2F9F4, 'M', '嶲'),
+ (0x2F9F5, 'M', '霣'),
+ (0x2F9F6, 'M', '𩅅'),
+ (0x2F9F7, 'M', '𩈚'),
+ (0x2F9F8, 'M', '䩮'),
+ (0x2F9F9, 'M', '䩶'),
+ (0x2F9FA, 'M', '韠'),
+ (0x2F9FB, 'M', '𩐊'),
+ (0x2F9FC, 'M', '䪲'),
+ (0x2F9FD, 'M', '𩒖'),
+ (0x2F9FE, 'M', '頋'),
+ (0x2FA00, 'M', '頩'),
+ (0x2FA01, 'M', '𩖶'),
+ (0x2FA02, 'M', '飢'),
+ (0x2FA03, 'M', '䬳'),
+ (0x2FA04, 'M', '餩'),
+ (0x2FA05, 'M', '馧'),
+ (0x2FA06, 'M', '駂'),
+ (0x2FA07, 'M', '駾'),
+ (0x2FA08, 'M', '䯎'),
+ (0x2FA09, 'M', '𩬰'),
+ (0x2FA0A, 'M', '鬒'),
+ (0x2FA0B, 'M', '鱀'),
+ (0x2FA0C, 'M', '鳽'),
+ (0x2FA0D, 'M', '䳎'),
+ (0x2FA0E, 'M', '䳭'),
+ (0x2FA0F, 'M', '鵧'),
+ (0x2FA10, 'M', '𪃎'),
+ (0x2FA11, 'M', '䳸'),
+ (0x2FA12, 'M', '𪄅'),
+ (0x2FA13, 'M', '𪈎'),
+ (0x2FA14, 'M', '𪊑'),
+ (0x2FA15, 'M', '麻'),
+ (0x2FA16, 'M', '䵖'),
+ (0x2FA17, 'M', '黹'),
+ (0x2FA18, 'M', '黾'),
+ (0x2FA19, 'M', '鼅'),
+ (0x2FA1A, 'M', '鼏'),
+ (0x2FA1B, 'M', '鼖'),
+ (0x2FA1C, 'M', '鼻'),
+ (0x2FA1D, 'M', '𪘀'),
+ (0x2FA1E, 'X'),
+ (0x30000, 'V'),
+ (0x3134B, 'X'),
+ (0x31350, 'V'),
+ (0x323B0, 'X'),
+ (0xE0100, 'I'),
+ (0xE01F0, 'X'),
+ ]
+
+uts46data = tuple(
+ _seg_0()
+ + _seg_1()
+ + _seg_2()
+ + _seg_3()
+ + _seg_4()
+ + _seg_5()
+ + _seg_6()
+ + _seg_7()
+ + _seg_8()
+ + _seg_9()
+ + _seg_10()
+ + _seg_11()
+ + _seg_12()
+ + _seg_13()
+ + _seg_14()
+ + _seg_15()
+ + _seg_16()
+ + _seg_17()
+ + _seg_18()
+ + _seg_19()
+ + _seg_20()
+ + _seg_21()
+ + _seg_22()
+ + _seg_23()
+ + _seg_24()
+ + _seg_25()
+ + _seg_26()
+ + _seg_27()
+ + _seg_28()
+ + _seg_29()
+ + _seg_30()
+ + _seg_31()
+ + _seg_32()
+ + _seg_33()
+ + _seg_34()
+ + _seg_35()
+ + _seg_36()
+ + _seg_37()
+ + _seg_38()
+ + _seg_39()
+ + _seg_40()
+ + _seg_41()
+ + _seg_42()
+ + _seg_43()
+ + _seg_44()
+ + _seg_45()
+ + _seg_46()
+ + _seg_47()
+ + _seg_48()
+ + _seg_49()
+ + _seg_50()
+ + _seg_51()
+ + _seg_52()
+ + _seg_53()
+ + _seg_54()
+ + _seg_55()
+ + _seg_56()
+ + _seg_57()
+ + _seg_58()
+ + _seg_59()
+ + _seg_60()
+ + _seg_61()
+ + _seg_62()
+ + _seg_63()
+ + _seg_64()
+ + _seg_65()
+ + _seg_66()
+ + _seg_67()
+ + _seg_68()
+ + _seg_69()
+ + _seg_70()
+ + _seg_71()
+ + _seg_72()
+ + _seg_73()
+ + _seg_74()
+ + _seg_75()
+ + _seg_76()
+ + _seg_77()
+ + _seg_78()
+ + _seg_79()
+ + _seg_80()
+ + _seg_81()
+) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...]
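A note on how a range table like `uts46data` is consumed: each tuple marks the first codepoint of a range plus its UTS #46 status ('M' mapped, 'V' valid, 'X' disallowed, 'I' ignored), and the governing entry for any codepoint is found by binary search. A minimal sketch, assuming the vendored `idna` package is importable; the `lookup` helper is illustrative rather than idna's public API:

```python
# Minimal sketch: binary-search lookup into the uts46data range table.
# Each entry is (start_codepoint, status[, mapping]) and governs all
# codepoints up to the next entry's start.
import bisect
from idna.uts46data import uts46data

_STARTS = [entry[0] for entry in uts46data]  # ascending start codepoints

def lookup(cp: int):
    """Return the table entry governing codepoint `cp` (illustrative helper)."""
    idx = bisect.bisect_right(_STARTS, cp) - 1
    return uts46data[idx]

print(lookup(0x2F9D8))  # the entry (0x2F9D8, 'M', '𧼯'): mapped under UTS #46
```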
diff --git a/Lib/site-packages/imageio-2.33.1.dist-info/INSTALLER b/Lib/site-packages/imageio-2.33.1.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/imageio-2.33.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/imageio-2.33.1.dist-info/LICENSE b/Lib/site-packages/imageio-2.33.1.dist-info/LICENSE
new file mode 100644
index 0000000..33b1352
--- /dev/null
+++ b/Lib/site-packages/imageio-2.33.1.dist-info/LICENSE
@@ -0,0 +1,24 @@
+Copyright (c) 2014-2022, imageio developers
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
diff --git a/Lib/site-packages/imageio-2.33.1.dist-info/METADATA b/Lib/site-packages/imageio-2.33.1.dist-info/METADATA
new file mode 100644
index 0000000..593e955
--- /dev/null
+++ b/Lib/site-packages/imageio-2.33.1.dist-info/METADATA
@@ -0,0 +1,133 @@
+Metadata-Version: 2.1
+Name: imageio
+Version: 2.33.1
+Summary: Library for reading and writing a wide range of image, video, scientific, and volumetric data formats.
+Home-page: https://github.com/imageio/imageio
+Download-URL: http://pypi.python.org/pypi/imageio
+Author: imageio contributors
+Author-email: almar.klein@gmail.com
+License: BSD-2-Clause
+Keywords: image video volume imread imwrite io animation ffmpeg
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Science/Research
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: POSIX
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Provides: imageio
+Requires-Python: >=3.8
+License-File: LICENSE
+Requires-Dist: numpy
+Requires-Dist: pillow >=8.3.2
+Provides-Extra: all-plugins
+Requires-Dist: astropy ; extra == 'all-plugins'
+Requires-Dist: av ; extra == 'all-plugins'
+Requires-Dist: imageio-ffmpeg ; extra == 'all-plugins'
+Requires-Dist: pillow-heif ; extra == 'all-plugins'
+Requires-Dist: psutil ; extra == 'all-plugins'
+Requires-Dist: tifffile ; extra == 'all-plugins'
+Provides-Extra: all-plugins-pypy
+Requires-Dist: av ; extra == 'all-plugins-pypy'
+Requires-Dist: imageio-ffmpeg ; extra == 'all-plugins-pypy'
+Requires-Dist: pillow-heif ; extra == 'all-plugins-pypy'
+Requires-Dist: psutil ; extra == 'all-plugins-pypy'
+Requires-Dist: tifffile ; extra == 'all-plugins-pypy'
+Provides-Extra: bsdf
+Provides-Extra: build
+Requires-Dist: wheel ; extra == 'build'
+Provides-Extra: dev
+Requires-Dist: pytest ; extra == 'dev'
+Requires-Dist: pytest-cov ; extra == 'dev'
+Requires-Dist: fsspec[github] ; extra == 'dev'
+Requires-Dist: black ; extra == 'dev'
+Requires-Dist: flake8 ; extra == 'dev'
+Provides-Extra: dicom
+Provides-Extra: docs
+Requires-Dist: sphinx <6 ; extra == 'docs'
+Requires-Dist: numpydoc ; extra == 'docs'
+Requires-Dist: pydata-sphinx-theme ; extra == 'docs'
+Provides-Extra: feisem
+Provides-Extra: ffmpeg
+Requires-Dist: imageio-ffmpeg ; extra == 'ffmpeg'
+Requires-Dist: psutil ; extra == 'ffmpeg'
+Provides-Extra: fits
+Requires-Dist: astropy ; extra == 'fits'
+Provides-Extra: freeimage
+Provides-Extra: full
+Requires-Dist: astropy ; extra == 'full'
+Requires-Dist: av ; extra == 'full'
+Requires-Dist: black ; extra == 'full'
+Requires-Dist: flake8 ; extra == 'full'
+Requires-Dist: fsspec[github] ; extra == 'full'
+Requires-Dist: gdal ; extra == 'full'
+Requires-Dist: imageio-ffmpeg ; extra == 'full'
+Requires-Dist: itk ; extra == 'full'
+Requires-Dist: numpydoc ; extra == 'full'
+Requires-Dist: pillow-heif ; extra == 'full'
+Requires-Dist: psutil ; extra == 'full'
+Requires-Dist: pydata-sphinx-theme ; extra == 'full'
+Requires-Dist: pytest ; extra == 'full'
+Requires-Dist: pytest-cov ; extra == 'full'
+Requires-Dist: sphinx <6 ; extra == 'full'
+Requires-Dist: tifffile ; extra == 'full'
+Requires-Dist: wheel ; extra == 'full'
+Provides-Extra: gdal
+Requires-Dist: gdal ; extra == 'gdal'
+Provides-Extra: itk
+Requires-Dist: itk ; extra == 'itk'
+Provides-Extra: linting
+Requires-Dist: black ; extra == 'linting'
+Requires-Dist: flake8 ; extra == 'linting'
+Provides-Extra: lytro
+Provides-Extra: numpy
+Provides-Extra: pillow
+Provides-Extra: pillow-heif
+Requires-Dist: pillow-heif ; extra == 'pillow-heif'
+Provides-Extra: pyav
+Requires-Dist: av ; extra == 'pyav'
+Provides-Extra: simpleitk
+Provides-Extra: spe
+Provides-Extra: swf
+Provides-Extra: test
+Requires-Dist: pytest ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: fsspec[github] ; extra == 'test'
+Provides-Extra: tifffile
+Requires-Dist: tifffile ; extra == 'tifffile'
+
+
+.. image:: https://github.com/imageio/imageio/workflows/CI/badge.svg
+ :target: https://github.com/imageio/imageio/actions
+
+
+Imageio is a Python library that provides an easy interface to read and
+write a wide range of image data, including animated images, volumetric
+data, and scientific formats. It is cross-platform, runs on Python 3.8+,
+and is easy to install.
+
+Main website: https://imageio.readthedocs.io/
+
+
+Release notes: https://github.com/imageio/imageio/blob/master/CHANGELOG.md
+
+Example:
+
+.. code-block:: python
+
+ >>> import imageio
+ >>> im = imageio.imread('imageio:astronaut.png')
+ >>> im.shape # im is a numpy array
+ (512, 512, 3)
+ >>> imageio.imwrite('astronaut-gray.jpg', im[:, :, 0])
+
+See the API Reference and examples at https://imageio.readthedocs.io/
+for more information.
diff --git a/Lib/site-packages/imageio-2.33.1.dist-info/RECORD b/Lib/site-packages/imageio-2.33.1.dist-info/RECORD
new file mode 100644
index 0000000..74088da
--- /dev/null
+++ b/Lib/site-packages/imageio-2.33.1.dist-info/RECORD
@@ -0,0 +1,115 @@
+../../Scripts/imageio_download_bin.exe,sha256=Pg0VxBHTlHdcPpayal5qEwvcOZIa8Fjf-W3g7ruYlpk,108447
+../../Scripts/imageio_remove_bin.exe,sha256=fCONANNI4QqgNBn3MYbDfN6Q8vGtigVhsqOSvoojq0w,108443
+imageio-2.33.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+imageio-2.33.1.dist-info/LICENSE,sha256=rlmepQpJTvtyXkIKqzXR91kgDP5BhrbGSjC6Sds_0GQ,1307
+imageio-2.33.1.dist-info/METADATA,sha256=rOiFifD1NnCesz6b2SAocovjhlm9GZt1jnCZ2WVQY5Y,4875
+imageio-2.33.1.dist-info/RECORD,,
+imageio-2.33.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+imageio-2.33.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+imageio-2.33.1.dist-info/entry_points.txt,sha256=0-yB6XGfrx1OMPw_xigPramTcwi5M4jX6L5Edrz0OoU,130
+imageio-2.33.1.dist-info/top_level.txt,sha256=iSUjc-wEw-xbMTvMOSKg85n0-E7Ms--Mo4FLMC-J2YM,8
+imageio/__init__.py,sha256=x1Z-E9qCPOiR-RSkWCTVyQDvCbQgrIMJ4pJbNdydqIU,3272
+imageio/__main__.py,sha256=s5nidb9wRZ6AbimHTPHULt3sTXPx4mqNil67KJHZvd4,5393
+imageio/__pycache__/__init__.cpython-312.pyc,,
+imageio/__pycache__/__main__.cpython-312.pyc,,
+imageio/__pycache__/freeze.cpython-312.pyc,,
+imageio/__pycache__/testing.cpython-312.pyc,,
+imageio/__pycache__/typing.cpython-312.pyc,,
+imageio/__pycache__/v2.cpython-312.pyc,,
+imageio/__pycache__/v3.cpython-312.pyc,,
+imageio/config/__init__.py,sha256=8NOpL5ePrkiioJb9hRBw3rydc4iNZkMwp7VdQlP4jDc,307
+imageio/config/__pycache__/__init__.cpython-312.pyc,,
+imageio/config/__pycache__/extensions.cpython-312.pyc,,
+imageio/config/__pycache__/plugins.cpython-312.pyc,,
+imageio/config/extensions.py,sha256=2pXqdJLXn4XDvbVml4Efzfjw9smw9ROM--JE9_db-tc,47004
+imageio/config/extensions.pyi,sha256=sLrA-wt09kPHBDJP79tGtEOX7XTcEEjRzA70O8BCsD0,605
+imageio/config/plugins.py,sha256=j7suVaEDiQwutMXcBZPxO_OA7G_7STwhCaZ-8o2zwio,20157
+imageio/config/plugins.pyi,sha256=pzH8pacqU5uldsvYOee_nhd2Hkk3mR8VQBtjeVnkkHY,706
+imageio/core/__init__.py,sha256=PSkGH8K76ntSWhwM4j7W49UmCSZf_OGaSl9fNbQP7uQ,639
+imageio/core/__pycache__/__init__.cpython-312.pyc,,
+imageio/core/__pycache__/fetching.cpython-312.pyc,,
+imageio/core/__pycache__/findlib.cpython-312.pyc,,
+imageio/core/__pycache__/format.cpython-312.pyc,,
+imageio/core/__pycache__/imopen.cpython-312.pyc,,
+imageio/core/__pycache__/legacy_plugin_wrapper.cpython-312.pyc,,
+imageio/core/__pycache__/request.cpython-312.pyc,,
+imageio/core/__pycache__/util.cpython-312.pyc,,
+imageio/core/__pycache__/v3_plugin_api.cpython-312.pyc,,
+imageio/core/fetching.py,sha256=r81yBsJMqkwAXeVAuQuAzbk9etWxQUEUe4__UUjpQpc,9176
+imageio/core/findlib.py,sha256=Zrhs0rEyp8p8iSIuCoBco0dCaB5dxJVZ4lRgv82Sqm0,5552
+imageio/core/format.py,sha256=glQcJOZHEOST3u0jOa338ZxJBX_daEe6xl7-UKxuU6E,30917
+imageio/core/format.pyi,sha256=5BZF-xwp5BmG8C5ahfL48z_a2MITN0509Uf6f1phZRw,3336
+imageio/core/imopen.py,sha256=SA4OJj93B09CHsKSILdH1w3zdVWvRSopNWlGlS0f4t0,9752
+imageio/core/imopen.pyi,sha256=QcVF5tUjy6qrAK2P5J_9wj2Heb3dt9Uyz3RKZpJCfjE,1982
+imageio/core/legacy_plugin_wrapper.py,sha256=CYGXhJY-18HkVYqyzlepM7NcZ9VLvBjFjNj64HOBqBM,12136
+imageio/core/legacy_plugin_wrapper.pyi,sha256=ENmdth_Avp2yTzuyInGWT2QXgAv72RrFRd6QH71LVqU,1064
+imageio/core/request.py,sha256=vG5n2gAu4GUdsBdajcUCalSnJJSv0wdzad4DT3iDIF8,26826
+imageio/core/request.pyi,sha256=ivqAXs3UfxhuXQfg8qsAtEVymCsppPwadztFzSXpIAo,2315
+imageio/core/util.py,sha256=Gt4NiZYKXjeB5AgyiHOFi4ntn7iTcSjj8X_kDz2R6DM,18657
+imageio/core/v3_plugin_api.py,sha256=w8wUjlT7_N6aU76DYGF3ubYYfUHTyfStvK5_xosZLPQ,15560
+imageio/freeze.py,sha256=hi9MNZz-ridgQBWcAqnd92sULek2lgmBSTmuott5lus,170
+imageio/plugins/__init__.py,sha256=GSxtio0ph5QHP2asdLvyzW8lVfiRqOii8kaqYsBO9CE,3469
+imageio/plugins/__pycache__/__init__.cpython-312.pyc,,
+imageio/plugins/__pycache__/_bsdf.cpython-312.pyc,,
+imageio/plugins/__pycache__/_dicom.cpython-312.pyc,,
+imageio/plugins/__pycache__/_freeimage.cpython-312.pyc,,
+imageio/plugins/__pycache__/_swf.cpython-312.pyc,,
+imageio/plugins/__pycache__/_tifffile.cpython-312.pyc,,
+imageio/plugins/__pycache__/bsdf.cpython-312.pyc,,
+imageio/plugins/__pycache__/dicom.cpython-312.pyc,,
+imageio/plugins/__pycache__/example.cpython-312.pyc,,
+imageio/plugins/__pycache__/feisem.cpython-312.pyc,,
+imageio/plugins/__pycache__/ffmpeg.cpython-312.pyc,,
+imageio/plugins/__pycache__/fits.cpython-312.pyc,,
+imageio/plugins/__pycache__/freeimage.cpython-312.pyc,,
+imageio/plugins/__pycache__/freeimagemulti.cpython-312.pyc,,
+imageio/plugins/__pycache__/gdal.cpython-312.pyc,,
+imageio/plugins/__pycache__/grab.cpython-312.pyc,,
+imageio/plugins/__pycache__/lytro.cpython-312.pyc,,
+imageio/plugins/__pycache__/npz.cpython-312.pyc,,
+imageio/plugins/__pycache__/opencv.cpython-312.pyc,,
+imageio/plugins/__pycache__/pillow.cpython-312.pyc,,
+imageio/plugins/__pycache__/pillow_info.cpython-312.pyc,,
+imageio/plugins/__pycache__/pillow_legacy.cpython-312.pyc,,
+imageio/plugins/__pycache__/pillowmulti.cpython-312.pyc,,
+imageio/plugins/__pycache__/pyav.cpython-312.pyc,,
+imageio/plugins/__pycache__/simpleitk.cpython-312.pyc,,
+imageio/plugins/__pycache__/spe.cpython-312.pyc,,
+imageio/plugins/__pycache__/swf.cpython-312.pyc,,
+imageio/plugins/__pycache__/tifffile.cpython-312.pyc,,
+imageio/plugins/__pycache__/tifffile_v3.cpython-312.pyc,,
+imageio/plugins/_bsdf.py,sha256=b-QjkZvz9DPDbygiKhee-47Ld2eOqxpYEdZ1mnrRPJ4,32753
+imageio/plugins/_dicom.py,sha256=Ub8KWS0rS2GliKqfjHLj1N0Mg8iI2lSqVwEcEdC6TiE,33834
+imageio/plugins/_freeimage.py,sha256=GD25ZqqvbFnBILPRYHrTb5qbFsvXBVKv_qIE3139D68,51740
+imageio/plugins/_swf.py,sha256=kh3H2v98bgHpVagGNbhGUodh0s-weiESraX6qzMnD2k,25760
+imageio/plugins/_tifffile.py,sha256=9MO4zhogZcBHoh8AkjqP52j0W-FaDji1__VqTMf6idU,371590
+imageio/plugins/bsdf.py,sha256=spISvLLVH319wDJ8YhYcvDTaJe2acElgWSvgqEkpd_g,12852
+imageio/plugins/dicom.py,sha256=mQYNbTyum4jVhjZQ8TU-4A5csHpQfT-BRBBCP5fu6Zs,12621
+imageio/plugins/example.py,sha256=4POb_LDQtSxHWxiflGqGKKKKrpItqLIFQeU8x7tro-c,5501
+imageio/plugins/feisem.py,sha256=AKwZv7Zac0_grnr-wnzU7R0Zf2KSUe91k06evPa1NI8,3360
+imageio/plugins/ffmpeg.py,sha256=N8Qq1TU5gr7U9IM-FCEuM9VIy1Jv875OC_XorStoOPI,29930
+imageio/plugins/fits.py,sha256=XnlmeC79sIiIPd_7IDx05-p3-b2unO4CVR0nWAA4ph0,4531
+imageio/plugins/freeimage.py,sha256=SuzYuGvCtZIiXIr51dWRTl5CATzRUqb8pNCSIg9YZv8,14645
+imageio/plugins/freeimagemulti.py,sha256=7jW3mJX-ZVnDqe2veIvU9wPY_x0EBOmPKP8ppPxRO_M,11288
+imageio/plugins/gdal.py,sha256=r2Ux7MQeHCUsmdk0aGENzGX8M5hCBU7NJomcf6G8FCU,1653
+imageio/plugins/grab.py,sha256=g6KbKVQUquHro_BW6He7NNmivVV-UtcsCJoDt3rdly0,2776
+imageio/plugins/lytro.py,sha256=V3dToE-eV6jLhtae26_uHHgOx6O1LsOo0hm7nnRptMM,25310
+imageio/plugins/npz.py,sha256=7ZQr-4lQEKbfjaF6rOmpq9pQgDTUHvkZa_NHZkJWBQo,2670
+imageio/plugins/opencv.py,sha256=6mYrbGXOdRm8M2_lx1Z6ufqk2_L-I7mHRa5ksoNFwxU,11630
+imageio/plugins/pillow.py,sha256=4siuR0UENadfQgdQ2z5bFWX464KMzMcfqIEKEBDzt6M,22318
+imageio/plugins/pillow_info.py,sha256=Bt5iJtQnAh6mGViPIxhxRQPNidqay9-6BleAJZkhN1w,36624
+imageio/plugins/pillow_legacy.py,sha256=nduUoks0Jp4fbizSBDqGCD__hVLQdRB6dgEbyfjOtHE,31714
+imageio/plugins/pillowmulti.py,sha256=-wsWpq0j2WXDgQGbyUuzCmw7iqSDz7e6AYqYhs46ZE8,11807
+imageio/plugins/pyav.py,sha256=AkB4hmzmu0SLvVUt2xajQWo3jzSFKuB2YRnCRjaunEk,44700
+imageio/plugins/simpleitk.py,sha256=ldQWjkiCSZPoUnN87MtUqRIMMcIKmk8ZUeyDCQhnpG0,4107
+imageio/plugins/spe.py,sha256=UyXgHZV-3gwwU-RmJUhgDnJ843wt9H3S3Fjs84faz38,32172
+imageio/plugins/swf.py,sha256=0B9f-HF528OcHXTIF3nptoSJUu4GNId03rFLfFFOaFk,11756
+imageio/plugins/tifffile.py,sha256=m8qgNy-lJkwHwKkyp3pZn2xYsnRRwZ8FVMpM-BIs6dI,20665
+imageio/plugins/tifffile_v3.py,sha256=Vs2ngBBptUoJ6QpT9EjyNd4-dih8zzGEvcq2mRNYFXg,14335
+imageio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+imageio/testing.py,sha256=tkRPxZZpG68q_MAIux8WE8QeKbhbq6rDPVfCDsof1Ms,1597
+imageio/typing.py,sha256=qrvyFrVIs21bZCE0x802l1R-xCV4DlCNaTzPiJEZbzc,349
+imageio/v2.py,sha256=1KJ5z8Ji2nnAdy_K3vIpysG2Kg7rIcPiadNG1pwKx-E,21563
+imageio/v2.pyi,sha256=ROazbwu1rSJLBaEtXmUG2oT9BMr7ZlyyW26twgFWx5E,2250
+imageio/v3.py,sha256=ZE0IlERPT_4wryYqUOD4-LLc6dVpDZXV6N6JEQtbMiQ,9267
+imageio/v3.pyi,sha256=AtLP0IWqS-sX1qDyHPdjCCIsKGwXU5z41XOXzUj2pGQ,1344
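An aside on the RECORD format itself: each row is `path,hash,size`, and the hash field, per the wheel spec, is `sha256=` followed by the URL-safe base64 of the digest with trailing `=` padding stripped. A small sketch of re-verifying a row:

```python
# Sketch: recomputing a RECORD hash field (wheel-spec convention).
import base64
import hashlib

def record_hash(path: str) -> str:
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    # URL-safe base64 with '=' padding stripped, as RECORD expects
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

# An empty file such as imageio/py.typed hashes to
# "sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU", matching its row above.
```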
diff --git a/Lib/site-packages/imageio-2.33.1.dist-info/REQUESTED b/Lib/site-packages/imageio-2.33.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/Lib/site-packages/imageio-2.33.1.dist-info/WHEEL b/Lib/site-packages/imageio-2.33.1.dist-info/WHEEL
new file mode 100644
index 0000000..98c0d20
--- /dev/null
+++ b/Lib/site-packages/imageio-2.33.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.42.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/Lib/site-packages/imageio-2.33.1.dist-info/entry_points.txt b/Lib/site-packages/imageio-2.33.1.dist-info/entry_points.txt
new file mode 100644
index 0000000..aa30161
--- /dev/null
+++ b/Lib/site-packages/imageio-2.33.1.dist-info/entry_points.txt
@@ -0,0 +1,3 @@
+[console_scripts]
+imageio_download_bin = imageio.__main__:download_bin_main
+imageio_remove_bin = imageio.__main__:remove_bin_main
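These two entries are what pip compiles into the `imageio_download_bin` and `imageio_remove_bin` executables listed at the top of RECORD. A sketch of resolving one at runtime with the standard library (the keyword-filtering form of `entry_points` needs Python 3.10+):

```python
# Sketch: resolving a console-script entry point at runtime.
from importlib.metadata import entry_points

ep = next(e for e in entry_points(group="console_scripts")
          if e.name == "imageio_download_bin")
print(ep.value)   # "imageio.__main__:download_bin_main"
main = ep.load()  # the callable the generated script wrapper invokes
```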
diff --git a/Lib/site-packages/imageio-2.33.1.dist-info/top_level.txt b/Lib/site-packages/imageio-2.33.1.dist-info/top_level.txt
new file mode 100644
index 0000000..a464e4c
--- /dev/null
+++ b/Lib/site-packages/imageio-2.33.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+imageio
diff --git a/Lib/site-packages/imageio/__init__.py b/Lib/site-packages/imageio/__init__.py
new file mode 100644
index 0000000..8903fd3
--- /dev/null
+++ b/Lib/site-packages/imageio/__init__.py
@@ -0,0 +1,131 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014-2020, imageio contributors
+# imageio is distributed under the terms of the (new) BSD License.
+
+# This docstring is used at the index of the documentation pages, and
+# gets inserted into a slightly larger description (in setup.py) for
+# the page on PyPI:
+"""
+Imageio is a Python library that provides an easy interface to read and
+write a wide range of image data, including animated images, volumetric
+data, and scientific formats. It is cross-platform, runs on Python 3.8+,
+and is easy to install.
+
+Main website: https://imageio.readthedocs.io/
+"""
+
+# flake8: noqa
+
+__version__ = "2.33.1"
+
+import warnings
+
+# Load some bits from core
+from .core import FormatManager, RETURN_BYTES
+
+# Instantiate the old format manager
+formats = FormatManager()
+show_formats = formats.show
+
+from . import v2
+from . import v3
+from . import plugins
+
+# import config after core to avoid circular import
+from . import config
+
+# import all APIs into the top level (meta API)
+from .v2 import (
+ imread as imread_v2,
+ mimread,
+ volread,
+ mvolread,
+ imwrite,
+ mimwrite,
+ volwrite,
+ mvolwrite,
+ # aliases
+ get_reader as read,
+ get_writer as save,
+ imwrite as imsave,
+ mimwrite as mimsave,
+ volwrite as volsave,
+ mvolwrite as mvolsave,
+ # misc
+ help,
+ get_reader,
+ get_writer,
+)
+from .v3 import (
+ imopen,
+ # imread, # Will take over once v3 is released
+ # imwrite, # Will take over once v3 is released
+ imiter,
+)
+
+
+def imread(uri, format=None, **kwargs):
+ """imread(uri, format=None, **kwargs)
+
+ Reads an image from the specified file. Returns a numpy array, which
+    comes with a dict of metadata at its 'meta' attribute.
+
+ Note that the image data is returned as-is, and may not always have
+ a dtype of uint8 (and thus may differ from what e.g. PIL returns).
+
+ Parameters
+ ----------
+ uri : {str, pathlib.Path, bytes, file}
+ The resource to load the image from, e.g. a filename, pathlib.Path,
+ http address or file object, see the docs for more info.
+ format : str
+ The format to use to read the file. By default imageio selects
+        the appropriate format for you, based on the filename and its contents.
+ kwargs : ...
+ Further keyword arguments are passed to the reader. See :func:`.help`
+ to see what arguments are available for a particular format.
+ """
+
+ warnings.warn(
+ "Starting with ImageIO v3 the behavior of this function will switch to that of"
+ " iio.v3.imread. To keep the current behavior (and make this warning disappear)"
+ " use `import imageio.v2 as imageio` or call `imageio.v2.imread` directly.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ return imread_v2(uri, format=format, **kwargs)
+
+
+__all__ = [
+ "v2",
+ "v3",
+ "config",
+ "plugins",
+ # v3 API
+ "imopen",
+ "imread",
+ "imwrite",
+ "imiter",
+ # v2 API
+ "mimread",
+ "volread",
+ "mvolread",
+ "imwrite",
+ "mimwrite",
+ "volwrite",
+ "mvolwrite",
+ # v2 aliases
+ "read",
+ "save",
+ "imsave",
+ "mimsave",
+ "volsave",
+ "mvolsave",
+ # functions to deprecate
+ "help",
+ "get_reader",
+ "get_writer",
+ "formats",
+ "show_formats",
+]
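For context, the deprecated `imread` shim above steers callers toward an explicit API version; a short sketch of the migration its warning recommends, using the `imageio:astronaut.png` standard image that ships with the library:

```python
# Sketch: opting into an explicit API version, as the DeprecationWarning suggests.
import imageio.v2 as iio_v2  # legacy semantics, no deprecation warning
import imageio.v3 as iio_v3  # the forward-looking API

im_v2 = iio_v2.imread("imageio:astronaut.png")  # ndarray subclass with a .meta dict
im_v3 = iio_v3.imread("imageio:astronaut.png")  # plain numpy ndarray
```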
diff --git a/Lib/site-packages/imageio/__main__.py b/Lib/site-packages/imageio/__main__.py
new file mode 100644
index 0000000..ad0ea0b
--- /dev/null
+++ b/Lib/site-packages/imageio/__main__.py
@@ -0,0 +1,169 @@
+"""
+Console scripts and associated helper methods for imageio.
+"""
+
+import argparse
+import os
+from os import path as op
+import shutil
+import sys
+
+
+from . import plugins
+from .core import util
+
+# A list of plugins that require binaries from the imageio-binaries
+# repository. These plugins must implement the `download` method.
+PLUGINS_WITH_BINARIES = ["freeimage"]
+
+
+def download_bin(plugin_names=["all"], package_dir=False):
+ """Download binary dependencies of plugins
+
+ This is a convenience method for downloading the binaries
+ (e.g. for freeimage) from the imageio-binaries
+ repository.
+
+ Parameters
+ ----------
+ plugin_names: list
+ A list of imageio plugin names. If it contains "all", all
+ binary dependencies are downloaded.
+ package_dir: bool
+ If set to `True`, the binaries will be downloaded to the
+ `resources` directory of the imageio package instead of
+        to the user's application data directory. Note that this
+ might require administrative rights if imageio is installed
+ in a system directory.
+ """
+ if plugin_names.count("all"):
+ # Use all plugins
+ plugin_names = PLUGINS_WITH_BINARIES
+
+ plugin_names.sort()
+ print("Ascertaining binaries for: {}.".format(", ".join(plugin_names)))
+
+ if package_dir:
+ # Download the binaries to the `resources` directory
+ # of imageio. If imageio comes as an .egg, then a cache
+ # directory will be created by pkg_resources (requires setuptools).
+ # see `imageio.core.util.resource_dirs`
+        # and `imageio.core.util.resource_package_dir`
+ directory = util.resource_package_dir()
+ else:
+ directory = None
+
+ for plg in plugin_names:
+ if plg not in PLUGINS_WITH_BINARIES:
+ msg = "Plugin {} not registered for binary download!".format(plg)
+ raise Exception(msg)
+ mod = getattr(plugins, plg)
+ mod.download(directory=directory)
+
+
+def download_bin_main():
+ """Argument-parsing wrapper for `download_bin`"""
+ description = "Download plugin binary dependencies"
+ phelp = (
+ "Plugin name for which to download the binary. "
+ + "If no argument is given, all binaries are downloaded."
+ )
+ dhelp = (
+ "Download the binaries to the package directory "
+        + "(default is the user's application data directory). "
+ + "This might require administrative rights."
+ )
+ example_text = (
+ "examples:\n"
+ + " imageio_download_bin all\n"
+ + " imageio_download_bin freeimage\n"
+ )
+ parser = argparse.ArgumentParser(
+ description=description,
+ epilog=example_text,
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ )
+ parser.add_argument("plugin", type=str, nargs="*", default="all", help=phelp)
+ parser.add_argument(
+ "--package-dir",
+ dest="package_dir",
+ action="store_true",
+ default=False,
+ help=dhelp,
+ )
+ args = parser.parse_args()
+ download_bin(plugin_names=args.plugin, package_dir=args.package_dir)
+
+
+def remove_bin(plugin_names=["all"]):
+ """Remove binary dependencies of plugins
+
+    This is a convenience method that removes all binary
+    dependencies of plugins downloaded by imageio.
+
+ Notes
+ -----
+ It only makes sense to use this method if the binaries
+ are corrupt.
+ """
+ if plugin_names.count("all"):
+ # Use all plugins
+ plugin_names = PLUGINS_WITH_BINARIES
+
+ print("Removing binaries for: {}.".format(", ".join(plugin_names)))
+
+ rdirs = util.resource_dirs()
+
+ for plg in plugin_names:
+ if plg not in PLUGINS_WITH_BINARIES:
+ msg = "Plugin {} not registered for binary download!".format(plg)
+ raise Exception(msg)
+
+ not_removed = []
+ for rd in rdirs:
+ # plugin name is in subdirectories
+ for rsub in os.listdir(rd):
+ if rsub in plugin_names:
+ plgdir = op.join(rd, rsub)
+ try:
+ shutil.rmtree(plgdir)
+ except Exception:
+ not_removed.append(plgdir)
+ if not_removed:
+ nrs = ",".join(not_removed)
+ msg2 = (
+            "These plugin files could not be removed: {}\n".format(nrs)
+ + "Make sure they are not used by any program and try again."
+ )
+ raise Exception(msg2)
+
+
+def remove_bin_main():
+ """Argument-parsing wrapper for `remove_bin`"""
+ description = "Remove plugin binary dependencies"
+ phelp = (
+ "Plugin name for which to remove the binary. "
+ + "If no argument is given, all binaries are removed."
+ )
+ example_text = (
+ "examples:\n"
+ + " imageio_remove_bin all\n"
+ + " imageio_remove_bin freeimage\n"
+ )
+ parser = argparse.ArgumentParser(
+ description=description,
+ epilog=example_text,
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ )
+ parser.add_argument("plugin", type=str, nargs="*", default="all", help=phelp)
+ args = parser.parse_args()
+ remove_bin(plugin_names=args.plugin)
+
+
+if __name__ == "__main__":
+ if len(sys.argv) > 1 and sys.argv[1] == "download_bin":
+ download_bin_main()
+ elif len(sys.argv) > 1 and sys.argv[1] == "remove_bin":
+ remove_bin_main()
+ else:
+ raise RuntimeError("Invalid use of the imageio CLI")
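The helpers above are plain functions, so the console scripts are not the only way in; a minimal sketch of driving them programmatically:

```python
# Sketch: calling the binary-management helpers directly.
from imageio.__main__ import download_bin, remove_bin

download_bin(["freeimage"])              # fetch the FreeImage binaries
download_bin(["all"], package_dir=True)  # or place them in the package dir
remove_bin(["freeimage"])                # delete them again (e.g. if corrupt)
```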
diff --git a/Lib/site-packages/imageio/config/__init__.py b/Lib/site-packages/imageio/config/__init__.py
new file mode 100644
index 0000000..ca78dd2
--- /dev/null
+++ b/Lib/site-packages/imageio/config/__init__.py
@@ -0,0 +1,16 @@
+from .extensions import (
+ extension_list,
+ known_extensions,
+ FileExtension,
+ video_extensions,
+)
+from .plugins import known_plugins, PluginConfig
+
+__all__ = [
+ "known_plugins",
+ "PluginConfig",
+ "extension_list",
+ "known_extensions",
+ "FileExtension",
+ "video_extensions",
+]
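The `known_plugins` and `extension_list` objects exported here drive format-based plugin selection: each `FileExtension` in `extensions.py` below carries an ordered `priority` list. A sketch of resolving that preference by hand; `preferred_plugin` is a hypothetical helper, and the real resolution lives in `imageio.core.imopen`:

```python
# Sketch: picking the highest-priority registered plugin for an extension.
# `preferred_plugin` is illustrative, not part of imageio's public API.
from imageio.config import extension_list, known_plugins

def preferred_plugin(ext: str) -> str:
    for fe in extension_list:
        if fe.extension == ext:
            for name in fe.priority:       # ordered by preference
                if name in known_plugins:  # registered plugin configs
                    return name
    raise ValueError(f"no known plugin for {ext!r}")

print(preferred_plugin(".png"))  # "pillow" heads the .png priority list
```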
diff --git a/Lib/site-packages/imageio/config/extensions.py b/Lib/site-packages/imageio/config/extensions.py
new file mode 100644
index 0000000..46a6415
--- /dev/null
+++ b/Lib/site-packages/imageio/config/extensions.py
@@ -0,0 +1,2002 @@
+"""
+A set of objects representing each file extension recognized by ImageIO. If an
+extension is not listed here it is still supported, as long as there exists a
+supporting backend.
+
+"""
+
+
+class FileExtension:
+ """File Extension Metadata
+
+    This class holds information about an image file format associated with a
+ given extension. This information is used to track plugins that are known to
+ be able to handle a particular format. It also contains additional
+ information about a format, which is used when creating the supported format
+ docs.
+
+ Plugins known to be able to handle this format are ordered by a ``priority``
+ list. This list is used to determine the ideal plugin to use when choosing a
+ plugin based on file extension.
+
+ Parameters
+ ----------
+ extension : str
+ The name of the extension including the initial dot, e.g. ".png".
+ priority : List
+ A list of plugin names (entries in config.known_plugins) that can handle
+ this format. The position of a plugin expresses a preference, e.g.
+ ["plugin1", "plugin2"] indicates that, if available, plugin1 should be
+ preferred over plugin2 when handling a request related to this format.
+ name : str
+ The full name of the format.
+ description : str
+ A description of the format.
+ external_link : str
+ A link to further information about the format. Typically, the format's
+ specification.
+    volume_support : bool
+ If True, the format/extension supports volumetric image data.
+
+ Examples
+ --------
+ >>> FileExtension(
+ name="Bitmap",
+ extension=".bmp",
+ priority=["pillow", "BMP-PIL", "BMP-FI", "ITK"],
+ external_link="https://en.wikipedia.org/wiki/BMP_file_format",
+ )
+
+ """
+
+ def __init__(
+ self,
+ *,
+ extension,
+ priority,
+ name=None,
+ description=None,
+ external_link=None,
+ volume_support=False
+ ):
+ self.extension = extension
+ self.priority = priority
+ self.name = name
+ self.description = description
+ self.external_link = external_link
+ self.default_priority = priority.copy()
+ self.volume_support = volume_support
+
+ def reset(self):
+ self.priority = self.default_priority.copy()
+
+
+extension_list = [
+ FileExtension(
+ name="Hasselblad raw",
+ extension=".3fr",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="Sony alpha",
+ extension=".arw",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="Animated Portable Network Graphics",
+ external_link="https://en.wikipedia.org/wiki/APNG",
+ extension=".apng",
+ priority=["pillow", "pyav"],
+ ),
+ FileExtension(
+ name="Audio Video Interleave",
+ extension=".avi",
+ priority=["FFMPEG"],
+ ),
+ FileExtension(
+ name="Casio raw format",
+ extension=".bay",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".blp",
+ priority=["pillow"],
+ ),
+ FileExtension(
+ name="Bitmap",
+ extension=".bmp",
+ priority=["pillow", "BMP-PIL", "BMP-FI", "ITK", "pyav", "opencv"],
+ external_link="https://en.wikipedia.org/wiki/BMP_file_format",
+ ),
+ FileExtension(
+ name="Device-Independent Bitmap",
+ extension=".dip",
+ priority=["opencv"],
+ external_link="https://en.wikipedia.org/wiki/BMP_file_format",
+ ),
+ FileExtension(
+ name="Re-Volt mipmap",
+ extension=".bmq",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="Binary Structured Data Format",
+ extension=".bsdf",
+ priority=["BSDF"],
+ external_link="http://bsdf.io/",
+ ),
+ FileExtension(
+ name="Binary Universal Form for the Representation of meteorological data",
+ extension=".bufr",
+ priority=["pillow", "BUFR-PIL"],
+ ),
+ FileExtension(
+ name="Silicon Graphics Image",
+ extension=".bw",
+ priority=["pillow", "SGI-PIL", "SGI-FI"],
+ ),
+ FileExtension(
+ name="Scirra Construct",
+ extension=".cap",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="AMETEK High Speed Camera Format",
+ extension=".cine",
+ priority=["RAW-FI"],
+ external_link="https://phantomhighspeed-knowledge.secure.force.com/servlet/fileField?id=0BE1N000000kD2i#:~:text=Cine%20is%20a%20video%20file,camera%20model%20and%20image%20resolution",
+ ),
+ FileExtension(extension=".cr2", priority=["RAW-FI"]),
+ FileExtension(
+ extension=".crw",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".cs1",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="Computerized Tomography",
+ extension=".ct",
+ priority=["DICOM"],
+ ),
+ FileExtension(
+ name="Windows Cursor Icons",
+ extension=".cur",
+ priority=["pillow", "CUR-PIL"],
+ ),
+ FileExtension(
+ name="Dr. Halo",
+ extension=".cut",
+ priority=["CUT-FI"],
+ ),
+ FileExtension(
+ extension=".dc2",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="DICOM file format",
+ extension=".dcm",
+ priority=["DICOM", "ITK"],
+ ),
+ FileExtension(
+ extension=".dcr",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="Intel DCX",
+ extension=".dcx",
+ priority=["pillow", "DCX-PIL"],
+ ),
+ FileExtension(
+ name="DirectX Texture Container",
+ extension=".dds",
+ priority=["pillow", "DDS-FI", "DDS-PIL"],
+ ),
+ FileExtension(
+ name="Windows Bitmap",
+ extension=".dib",
+ priority=["pillow", "DIB-PIL"],
+ ),
+ FileExtension(
+ name="DICOM file format",
+ extension=".dicom",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ extension=".dng",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".drf",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".dsc",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="Enhanced Compression Wavelet",
+ extension=".ecw",
+ priority=["GDAL"],
+ ),
+ FileExtension(
+ name="Windows Metafile",
+ extension=".emf",
+ priority=["pillow", "WMF-PIL"],
+ ),
+ FileExtension(
+ name="Encapsulated Postscript",
+ extension=".eps",
+ priority=["pillow", "EPS-PIL"],
+ ),
+ FileExtension(
+ extension=".erf",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="OpenEXR",
+ extension=".exr",
+ external_link="https://openexr.readthedocs.io/en/latest/",
+ priority=["EXR-FI", "pyav", "opencv"],
+ ),
+ FileExtension(
+ extension=".fff",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="Flexible Image Transport System File",
+ extension=".fit",
+ priority=["pillow", "FITS-PIL", "FITS"],
+ ),
+ FileExtension(
+ name="Flexible Image Transport System File",
+ extension=".fits",
+ priority=["pillow", "FITS-PIL", "FITS", "pyav"],
+ ),
+ FileExtension(
+ name="Autodesk FLC Animation",
+ extension=".flc",
+ priority=["pillow", "FLI-PIL"],
+ ),
+ FileExtension(
+ name="Autodesk FLI Animation",
+ extension=".fli",
+ priority=["pillow", "FLI-PIL"],
+ ),
+ FileExtension(
+ name="Kodak FlashPix",
+ extension=".fpx",
+ priority=["pillow", "FPX-PIL"],
+ ),
+ FileExtension(
+ name="Independence War 2: Edge Of Chaos Texture Format",
+ extension=".ftc",
+ priority=["pillow", "FTEX-PIL"],
+ ),
+ FileExtension(
+ name="Flexible Image Transport System File",
+ extension=".fts",
+ priority=["FITS"],
+ ),
+ FileExtension(
+ name="Independence War 2: Edge Of Chaos Texture Format",
+ extension=".ftu",
+ priority=["pillow", "FTEX-PIL"],
+ ),
+ FileExtension(
+ name="Flexible Image Transport System File",
+ extension=".fz",
+ priority=["FITS"],
+ ),
+ FileExtension(
+ name="Raw fax format CCITT G.3",
+ extension=".g3",
+ priority=["G3-FI"],
+ ),
+ FileExtension(
+ name="GIMP brush file",
+ extension=".gbr",
+ priority=["pillow", "GBR-PIL"],
+ ),
+ FileExtension(
+ name="Grassroots DICOM",
+ extension=".gdcm",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ name="Graphics Interchange Format",
+ extension=".gif",
+ priority=["pillow", "GIF-PIL", "pyav"],
+ ),
+ FileExtension(
+ name="UMDS GIPL",
+ extension=".gipl",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ name="gridded meteorological data",
+ extension=".grib",
+ priority=["pillow", "GRIB-PIL"],
+ ),
+ FileExtension(
+ name="Hierarchical Data Format 5",
+ extension=".h5",
+ priority=["pillow", "HDF5-PIL"],
+ ),
+ FileExtension(
+ name="Hierarchical Data Format 5",
+ extension=".hdf",
+ priority=["pillow", "HDF5-PIL"],
+ ),
+ FileExtension(
+ name="Hierarchical Data Format 5",
+ extension=".hdf5",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ name="JPEG Extended Range",
+ extension=".hdp",
+ priority=["JPEG-XR-FI"],
+ ),
+ FileExtension(
+ name="High Dynamic Range Image",
+ extension=".hdr",
+ priority=["HDR-FI", "ITK", "opencv"],
+ ),
+ FileExtension(
+ extension=".ia",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".icb",
+ priority=["pillow"],
+ ),
+ FileExtension(
+ name="Mac OS Icon File",
+ extension=".icns",
+ priority=["pillow", "ICNS-PIL"],
+ ),
+ FileExtension(
+ name="Windows Icon File",
+ extension=".ico",
+ priority=["pillow", "ICO-FI", "ICO-PIL", "pyav"],
+ ),
+ FileExtension(
+ name="ILBM Interleaved Bitmap",
+ extension=".iff",
+ priority=["IFF-FI"],
+ ),
+ FileExtension(
+ name="IPTC/NAA",
+ extension=".iim",
+ priority=["pillow", "IPTC-PIL"],
+ ),
+ FileExtension(
+ extension=".iiq",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="IFUNC Image Memory",
+ extension=".im",
+ priority=["pillow", "IM-PIL"],
+ ),
+ FileExtension(
+ extension=".img",
+ priority=["ITK", "GDAL"],
+ ),
+ FileExtension(
+ extension=".img.gz",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ name="IM Tools",
+ extension=".IMT",
+ priority=["pillow", "IMT-PIL"],
+ ),
+ FileExtension(
+ name="Image Processing Lab",
+ extension=".ipl",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ name="JPEG 2000",
+ extension=".j2c",
+ priority=["pillow", "J2K-FI", "JPEG2000-PIL", "pyav"],
+ ),
+ FileExtension(
+ name="JPEG 2000",
+ extension=".j2k",
+ priority=["pillow", "J2K-FI", "JPEG2000-PIL", "pyav"],
+ ),
+ FileExtension(
+ name="JPEG",
+ extension=".jfif",
+ priority=["pillow", "JPEG-PIL"],
+ ),
+ FileExtension(
+ name="JPEG",
+ extension=".jif",
+ priority=["JPEG-FI"],
+ ),
+ FileExtension(
+ name="JPEG Network Graphics",
+ extension=".jng",
+ priority=["JNG-FI"],
+ ),
+ FileExtension(
+ name="JPEG 2000",
+ extension=".jp2",
+ priority=["pillow", "JP2-FI", "JPEG2000-PIL", "pyav", "opencv"],
+ ),
+ FileExtension(
+ name="JPEG 2000",
+ extension=".jpc",
+ priority=["pillow", "JPEG2000-PIL"],
+ ),
+ FileExtension(
+ name="JPEG",
+ extension=".jpe",
+ priority=["pillow", "JPEG-FI", "JPEG-PIL", "opencv"],
+ ),
+ FileExtension(
+ name="Joint Photographic Experts Group",
+ extension=".jpeg",
+ priority=["pillow", "JPEG-PIL", "JPEG-FI", "ITK", "GDAL", "pyav", "opencv"],
+ ),
+ FileExtension(
+ name="JPEG 2000",
+ extension=".jpf",
+ priority=["pillow", "JPEG2000-PIL"],
+ ),
+ FileExtension(
+ name="Joint Photographic Experts Group",
+ extension=".jpg",
+ priority=["pillow", "JPEG-PIL", "JPEG-FI", "ITK", "GDAL", "pyav", "opencv"],
+ ),
+ FileExtension(
+ name="JPEG 2000",
+ extension=".jpx",
+ priority=["pillow", "JPEG2000-PIL"],
+ ),
+ FileExtension(
+ name="JPEG Extended Range",
+ extension=".jxr",
+ priority=["JPEG-XR-FI"],
+ ),
+ FileExtension(
+ extension=".k25",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".kc2",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".kdc",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="C64 Koala Graphics",
+ extension=".koa",
+ priority=["KOALA-FI"],
+ ),
+ FileExtension(
+ name="ILBM Interleaved Bitmap",
+ extension=".lbm",
+ priority=["IFF-FI"],
+ ),
+ FileExtension(
+ name="Lytro F01",
+ extension=".lfp",
+ priority=["LYTRO-LFP"],
+ ),
+ FileExtension(
+ name="Lytro Illum",
+ extension=".lfr",
+ priority=["LYTRO-LFR"],
+ ),
+ FileExtension(
+ name="ZEISS LSM",
+ extension=".lsm",
+ priority=["tifffile", "ITK", "TIFF"],
+ ),
+ FileExtension(
+ name="McIdas area file",
+ extension=".MCIDAS",
+ priority=["pillow", "MCIDAS-PIL"],
+ external_link="https://www.ssec.wisc.edu/mcidas/doc/prog_man/2003print/progman2003-formats.html",
+ ),
+ FileExtension(
+ extension=".mdc",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".mef",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="FreeSurfer File Format",
+ extension=".mgh",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ name="ITK MetaImage",
+ extension=".mha",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ name="ITK MetaImage Header",
+ extension=".mhd",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ name="Microsoft Image Composer",
+ extension=".mic",
+ priority=["pillow", "MIC-PIL"],
+ ),
+ FileExtension(
+ name="Matroska Multimedia Container",
+ extension=".mkv",
+ priority=["FFMPEG", "pyav"],
+ ),
+ FileExtension(
+ name="Medical Imaging NetCDF",
+ extension=".mnc",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ name="Medical Imaging NetCDF 2",
+ extension=".mnc2",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ name="Leaf Raw Image Format",
+ extension=".mos",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="QuickTime File Format",
+ extension=".mov",
+ priority=["FFMPEG", "pyav"],
+ ),
+ FileExtension(
+ name="MPEG-4 Part 14",
+ extension=".mp4",
+ priority=["FFMPEG", "pyav"],
+ ),
+ FileExtension(
+ name="MPEG-1 Moving Picture Experts Group",
+ extension=".mpeg",
+ priority=["FFMPEG", "pyav"],
+ ),
+ FileExtension(
+ name="Moving Picture Experts Group",
+ extension=".mpg",
+ priority=["pillow", "FFMPEG", "pyav"],
+ ),
+ FileExtension(
+ name="JPEG Multi-Picture Format",
+ extension=".mpo",
+ priority=["pillow", "MPO-PIL"],
+ ),
+ FileExtension(
+ name="Magnetic resonance imaging",
+ extension=".mri",
+ priority=["DICOM"],
+ ),
+ FileExtension(
+ extension=".mrw",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="Windows Paint",
+ extension=".msp",
+ priority=["pillow", "MSP-PIL"],
+ ),
+ FileExtension(
+ extension=".nef",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".nhdr",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ extension=".nia",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ extension=".nii",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ name="nii.gz",
+ extension=".nii.gz",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ name="Numpy Array",
+ extension=".npz",
+ priority=["NPZ"],
+ volume_support=True,
+ ),
+ FileExtension(
+ extension=".nrrd",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ extension=".nrw",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".orf",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".palm",
+ priority=["pillow"],
+ ),
+ FileExtension(
+ name="Portable Bitmap",
+ extension=".pbm",
+ priority=["PGM-FI", "PGMRAW-FI", "pyav", "opencv"],
+ ),
+ FileExtension(
+ name="Kodak PhotoCD",
+ extension=".pcd",
+ priority=["pillow", "PCD-FI", "PCD-PIL"],
+ ),
+ FileExtension(
+ name="Macintosh PICT",
+ extension=".pct",
+ priority=["PICT-FI"],
+ ),
+ FileExtension(
+ name="Zsoft Paintbrush",
+ extension=".PCX",
+ priority=["pillow", "PCX-FI", "PCX-PIL"],
+ ),
+ FileExtension(
+ extension=".pdf",
+ priority=["pillow"],
+ ),
+ FileExtension(
+ extension=".pef",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".pfm",
+ priority=["PFM-FI", "pyav", "opencv"],
+ ),
+ FileExtension(
+ name="Portable Greymap",
+ extension=".pgm",
+ priority=["pillow", "PGM-FI", "PGMRAW-FI", "pyav", "opencv"],
+ ),
+ FileExtension(
+ name="Macintosh PICT",
+ extension=".pic",
+ priority=["PICT-FI", "ITK", "opencv"],
+ ),
+ FileExtension(
+ name="Macintosh PICT",
+ extension=".pict",
+ priority=["PICT-FI"],
+ ),
+ FileExtension(
+ name="Portable Network Graphics",
+ extension=".png",
+ priority=["pillow", "PNG-PIL", "PNG-FI", "ITK", "pyav", "opencv"],
+ ),
+ FileExtension(
+ name="Portable Image Format",
+ extension=".pnm",
+ priority=["pillow", "opencv"],
+ ),
+ FileExtension(
+ name="Pbmplus image",
+ extension=".ppm",
+ priority=["pillow", "PPM-PIL", "pyav"],
+ ),
+ FileExtension(
+ name="Pbmplus image",
+ extension=".pbm",
+ priority=["pillow", "PPM-PIL", "PPM-FI"],
+ ),
+ FileExtension(
+ name="Portable image format",
+ extension=".pxm",
+ priority=["opencv"],
+ ),
+ FileExtension(
+ name="Portable Pixelmap (ASCII)",
+ extension=".ppm",
+ priority=["PPM-FI", "opencv"],
+ ),
+ FileExtension(
+ name="Portable Pixelmap (Raw)",
+ extension=".ppm",
+ priority=["PPMRAW-FI"],
+ ),
+ FileExtension(
+ name="Ghostscript",
+ extension=".ps",
+ priority=["pillow", "EPS-PIL"],
+ ),
+ FileExtension(
+        name="Adobe Photoshop 2.5 and 3.0",
+ extension=".psd",
+ priority=["pillow", "PSD-PIL", "PSD-FI"],
+ ),
+ FileExtension(
+ extension=".ptx",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".pxn",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="PIXAR raster image",
+ extension=".pxr",
+ priority=["pillow", "PIXAR-PIL"],
+ ),
+ FileExtension(
+ extension=".qtk",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".raf",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="Sun Raster File",
+ extension=".ras",
+ priority=["pillow", "SUN-PIL", "RAS-FI", "pyav", "opencv"],
+ ),
+ FileExtension(
+ name="Sun Raster File",
+ extension=".sr",
+ priority=["opencv"],
+ ),
+ FileExtension(
+ extension=".raw",
+ priority=["RAW-FI", "LYTRO-ILLUM-RAW", "LYTRO-F01-RAW"],
+ ),
+ FileExtension(
+ extension=".rdc",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="Silicon Graphics Image",
+ extension=".rgb",
+ priority=["pillow", "SGI-PIL"],
+ ),
+ FileExtension(
+ name="Silicon Graphics Image",
+ extension=".rgba",
+ priority=["pillow", "SGI-PIL"],
+ ),
+ FileExtension(
+ extension=".rw2",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".rwl",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".rwz",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ name="Silicon Graphics Image",
+ extension=".sgi",
+ priority=["pillow", "SGI-PIL", "pyav"],
+ ),
+ FileExtension(
+ name="SPE File Format",
+ extension=".spe",
+ priority=["SPE"],
+ ),
+ FileExtension(
+ extension=".SPIDER",
+ priority=["pillow", "SPIDER-PIL"],
+ ),
+ FileExtension(
+ extension=".sr2",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".srf",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".srw",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".sti",
+ priority=["RAW-FI"],
+ ),
+ FileExtension(
+ extension=".stk",
+ priority=["tifffile", "TIFF"],
+ ),
+ FileExtension(
+ name="ShockWave Flash",
+ extension=".swf",
+ priority=["SWF", "pyav"],
+ ),
+ FileExtension(
+ name="Truevision TGA",
+ extension=".targa",
+ priority=["pillow", "TARGA-FI"],
+ ),
+ FileExtension(
+ name="Truevision TGA",
+ extension=".tga",
+ priority=["pillow", "TGA-PIL", "TARGA-FI", "pyav"],
+ ),
+ FileExtension(
+ name="Tagged Image File",
+ extension=".tif",
+ priority=[
+ "tifffile",
+ "TIFF",
+ "pillow",
+ "TIFF-PIL",
+ "TIFF-FI",
+ "FEI",
+ "ITK",
+ "GDAL",
+ "pyav",
+ "opencv",
+ ],
+ volume_support=True,
+ ),
+ FileExtension(
+ name="Tagged Image File Format",
+ extension=".tiff",
+ priority=[
+ "tifffile",
+ "TIFF",
+ "pillow",
+ "TIFF-PIL",
+ "TIFF-FI",
+ "FEI",
+ "ITK",
+ "GDAL",
+ "pyav",
+ "opencv",
+ ],
+ volume_support=True,
+ ),
+ FileExtension(
+ extension=".vda",
+ priority=["pillow"],
+ ),
+ FileExtension(
+ extension=".vst",
+ priority=["pillow"],
+ ),
+ FileExtension(
+ extension=".vtk",
+ priority=["ITK"],
+ ),
+ FileExtension(
+ name="Wireless Bitmap",
+ extension=".wap",
+ priority=["WBMP-FI"],
+ ),
+ FileExtension(
+ name="Wireless Bitmap",
+ extension=".wbm",
+ priority=["WBMP-FI"],
+ ),
+ FileExtension(
+ name="Wireless Bitmap",
+ extension=".wbmp",
+ priority=["WBMP-FI"],
+ ),
+ FileExtension(
+ name="JPEG Extended Range",
+ extension=".wdp",
+ priority=["JPEG-XR-FI"],
+ ),
+ FileExtension(
+ name="Matroska",
+ extension=".webm",
+ priority=["FFMPEG", "pyav"],
+ ),
+ FileExtension(
+ name="Google WebP",
+ extension=".webp",
+ priority=["pillow", "WEBP-FI", "pyav", "opencv"],
+ ),
+ FileExtension(
+ name="Windows Meta File",
+ extension=".wmf",
+ priority=["pillow", "WMF-PIL"],
+ ),
+ FileExtension(
+ name="Windows Media Video",
+ extension=".wmv",
+ priority=["FFMPEG"],
+ ),
+ FileExtension(
+ name="X11 Bitmap",
+ extension=".xbm",
+ priority=["pillow", "XBM-PIL", "XBM-FI", "pyav"],
+ ),
+ FileExtension(
+ name="X11 Pixel Map",
+ extension=".xpm",
+ priority=["pillow", "XPM-PIL", "XPM-FI"],
+ ),
+ FileExtension(
+ name="Thumbnail Image",
+ extension=".XVTHUMB",
+ priority=["pillow", "XVTHUMB-PIL"],
+ ),
+ FileExtension(
+ extension=".dpx",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".im1",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".im24",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".im8",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".jls",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".ljpg",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".pam",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".pcx",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".pgmyuv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".pix",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".ppm",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".rs",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".sun",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".sunras",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".xface",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".xwd",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".y",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP (3GPP file format)",
+ extension=".3g2",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP (3GPP file format)",
+ extension=".3gp",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP (3GPP file format)",
+ extension=".f4v",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP (3GPP file format)",
+ extension=".ism",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP (3GPP file format)",
+ extension=".isma",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP (3GPP file format)",
+ extension=".ismv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP (3GPP file format)",
+ extension=".m4a",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP (3GPP file format)",
+ extension=".m4b",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP (3GPP file format)",
+ extension=".mj2",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP (3GPP file format)",
+ extension=".psp",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP2 (3GPP2 file format)",
+ extension=".3g2",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP2 (3GPP2 file format)",
+ extension=".3gp",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP2 (3GPP2 file format)",
+ extension=".f4v",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP2 (3GPP2 file format)",
+ extension=".ism",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP2 (3GPP2 file format)",
+ extension=".isma",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP2 (3GPP2 file format)",
+ extension=".ismv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP2 (3GPP2 file format)",
+ extension=".m4a",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP2 (3GPP2 file format)",
+ extension=".m4b",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP2 (3GPP2 file format)",
+ extension=".mj2",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GP2 (3GPP2 file format)",
+ extension=".psp",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="3GPP AMR",
+ extension=".amr",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="a64 - video for Commodore 64",
+ extension=".A64",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="a64 - video for Commodore 64",
+ extension=".a64",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Adobe Filmstrip",
+ extension=".flm",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="AMV",
+ extension=".amv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="ASF (Advanced / Active Streaming Format)",
+ extension=".asf",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="ASF (Advanced / Active Streaming Format)",
+ extension=".asf",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="ASF (Advanced / Active Streaming Format)",
+ extension=".wmv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="ASF (Advanced / Active Streaming Format)",
+ extension=".wmv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="AV1 Annex B",
+ extension=".obu",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="AV1 low overhead OBU",
+ extension=".obu",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="AVI (Audio Video Interleaved)",
+ extension=".avi",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="AVR (Audio Visual Research)",
+ extension=".avr",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Beam Software SIFF",
+ extension=".vb",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="CD Graphics",
+ extension=".cdg",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Commodore CDXL video",
+ extension=".cdxl",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Commodore CDXL video",
+ extension=".xl",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="DASH Muxer",
+ extension=".mpd",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Digital Pictures SGA",
+ extension=".sga",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Discworld II BMV",
+ extension=".bmv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="DV (Digital Video)",
+ extension=".dif",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="DV (Digital Video)",
+ extension=".dv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="F4V Adobe Flash Video",
+ extension=".f4v",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="FLV (Flash Video)",
+ extension=".flv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="GXF (General eXchange Format)",
+ extension=".gxf",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="iCE Draw File",
+ extension=".idf",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="IFV CCTV DVR",
+ extension=".ifv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="iPod H.264 MP4 (MPEG-4 Part 14)",
+ extension=".m4a",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="iPod H.264 MP4 (MPEG-4 Part 14)",
+ extension=".m4b",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="iPod H.264 MP4 (MPEG-4 Part 14)",
+ extension=".m4v",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="IVR (Internet Video Recording)",
+ extension=".ivr",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Konami PS2 SVAG",
+ extension=".svag",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="KUX (YouKu)",
+ extension=".kux",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="live RTMP FLV (Flash Video)",
+ extension=".flv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Loki SDL MJPEG",
+ extension=".mjpg",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="LVF",
+ extension=".lvf",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Matroska / WebM",
+ extension=".mk3d",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Matroska / WebM",
+ extension=".mka",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Matroska / WebM",
+ extension=".mks",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Microsoft XMV",
+ extension=".xmv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MIME multipart JPEG",
+ extension=".mjpg",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MobiClip MODS",
+ extension=".mods",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MobiClip MOFLEX",
+ extension=".moflex",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Motion Pixels MVI",
+ extension=".mvi",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MP4 (MPEG-4 Part 14)",
+ extension=".3g2",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MP4 (MPEG-4 Part 14)",
+ extension=".3gp",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MP4 (MPEG-4 Part 14)",
+ extension=".f4v",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MP4 (MPEG-4 Part 14)",
+ extension=".ism",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MP4 (MPEG-4 Part 14)",
+ extension=".isma",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MP4 (MPEG-4 Part 14)",
+ extension=".ismv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MP4 (MPEG-4 Part 14)",
+ extension=".m4a",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MP4 (MPEG-4 Part 14)",
+ extension=".m4b",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MP4 (MPEG-4 Part 14)",
+ extension=".mj2",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MP4 (MPEG-4 Part 14)",
+ extension=".psp",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MPEG-2 PS (DVD VOB)",
+ extension=".dvd",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MPEG-2 PS (SVCD)",
+ extension=".vob",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MPEG-2 PS (VOB)",
+ extension=".vob",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MPEG-TS (MPEG-2 Transport Stream)",
+ extension=".m2t",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MPEG-TS (MPEG-2 Transport Stream)",
+ extension=".m2ts",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MPEG-TS (MPEG-2 Transport Stream)",
+ extension=".mts",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MPEG-TS (MPEG-2 Transport Stream)",
+ extension=".ts",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Musepack",
+ extension=".mpc",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MXF (Material eXchange Format) Operational Pattern Atom",
+ extension=".mxf",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MXF (Material eXchange Format)",
+ extension=".mxf",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="MxPEG clip",
+ extension=".mxg",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="NC camera feed",
+ extension=".v",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="NUT",
+ extension=".nut",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Ogg Video",
+ extension=".ogv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Ogg",
+ extension=".ogg",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="On2 IVF",
+ extension=".ivf",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="PSP MP4 (MPEG-4 Part 14)",
+ extension=".psp",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Psygnosis YOP",
+ extension=".yop",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="QuickTime / MOV",
+ extension=".3g2",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="QuickTime / MOV",
+ extension=".3gp",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="QuickTime / MOV",
+ extension=".f4v",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="QuickTime / MOV",
+ extension=".ism",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="QuickTime / MOV",
+ extension=".isma",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="QuickTime / MOV",
+ extension=".ismv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="QuickTime / MOV",
+ extension=".m4a",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="QuickTime / MOV",
+ extension=".m4b",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="QuickTime / MOV",
+ extension=".mj2",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="QuickTime / MOV",
+ extension=".psp",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw AVS2-P2/IEEE1857.4 video",
+ extension=".avs",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw AVS2-P2/IEEE1857.4 video",
+ extension=".avs2",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw AVS3-P2/IEEE1857.10",
+ extension=".avs3",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw Chinese AVS (Audio Video Standard) video",
+ extension=".cavs",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw Dirac",
+ extension=".drc",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw Dirac",
+ extension=".vc2",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw DNxHD (SMPTE VC-3)",
+ extension=".dnxhd",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw DNxHD (SMPTE VC-3)",
+ extension=".dnxhr",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw GSM",
+ extension=".gsm",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw H.261",
+ extension=".h261",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw H.263",
+ extension=".h263",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw H.264 video",
+ extension=".264",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw H.264 video",
+ extension=".avc",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw H.264 video",
+ extension=".h264",
+ priority=["pyav", "FFMPEG"],
+ ),
+ FileExtension(
+ name="raw H.264 video",
+ extension=".h26l",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw HEVC video",
+ extension=".265",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw HEVC video",
+ extension=".h265",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw HEVC video",
+ extension=".hevc",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw id RoQ",
+ extension=".roq",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw Ingenient MJPEG",
+ extension=".cgi",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw IPU Video",
+ extension=".ipu",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw MJPEG 2000 video",
+ extension=".j2k",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw MJPEG video",
+ extension=".mjpeg",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw MJPEG video",
+ extension=".mjpg",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw MJPEG video",
+ extension=".mpo",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw MPEG-1 video",
+ extension=".m1v",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw MPEG-1 video",
+ extension=".mpeg",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw MPEG-1 video",
+ extension=".mpg",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw MPEG-2 video",
+ extension=".m2v",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw MPEG-4 video",
+ extension=".m4v",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw VC-1 video",
+ extension=".vc1",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw video",
+ extension=".cif",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw video",
+ extension=".qcif",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw video",
+ extension=".rgb",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="raw video",
+ extension=".yuv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="RealMedia",
+ extension=".rm",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="SDR2",
+ extension=".sdr2",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Sega FILM / CPK",
+ extension=".cpk",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="SER (Simple uncompressed video format for astronomical capturing)",
+ extension=".ser",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Simbiosis Interactive IMX",
+ extension=".imx",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Square SVS",
+ extension=".svs",
+ priority=["tifffile", "pyav"],
+ ),
+ FileExtension(
+ name="TiVo TY Stream",
+ extension=".ty",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="TiVo TY Stream",
+ extension=".ty+",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Uncompressed 4:2:2 10-bit",
+ extension=".v210",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Uncompressed 4:2:2 10-bit",
+ extension=".yuv10",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="VC-1 test bitstream",
+ extension=".rcv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Video CCTV DAT",
+ extension=".dat",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Video DAV",
+ extension=".dav",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Vivo",
+ extension=".viv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="WebM Chunk Muxer",
+ extension=".chk",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="WebM",
+ extension=".mk3d",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="WebM",
+ extension=".mka",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="WebM",
+ extension=".mks",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Windows Television (WTV)",
+ extension=".wtv",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="Xilam DERF",
+ extension=".adp",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ name="YUV4MPEG pipe",
+ extension=".y4m",
+ priority=["pyav"],
+ ),
+ FileExtension(
+ extension=".qpi",
+ priority=["tifffile"],
+ ),
+ FileExtension(
+ name="PCO Camera",
+ extension=".pcoraw",
+ priority=["tifffile"],
+ ),
+ FileExtension(
+ name="PCO Camera",
+ extension=".rec",
+ priority=["tifffile"],
+ ),
+ FileExtension(
+ name="Perkin Elmer Vectra",
+ extension=".qptiff",
+ priority=["tifffile"],
+ ),
+ FileExtension(
+ name="Pyramid Encoded TIFF",
+ extension=".ptiff",
+ priority=["tifffile"],
+ ),
+ FileExtension(
+ name="Pyramid Encoded TIFF",
+ extension=".ptif",
+ priority=["tifffile"],
+ ),
+ FileExtension(
+ name="Opticks Gel",
+ extension=".gel",
+ priority=["tifffile"],
+ ),
+ FileExtension(
+ name="Zoomify Image Format",
+ extension=".zif",
+ priority=["tifffile"],
+ ),
+ FileExtension(
+ name="Hamamatsu Slide Scanner",
+ extension=".ndpi",
+ priority=["tifffile"],
+ ),
+ FileExtension(
+ name="Roche Digital Pathology",
+ extension=".bif",
+ priority=["tifffile"],
+ ),
+ FileExtension(
+ extension=".tf8",
+ priority=["tifffile"],
+ ),
+ FileExtension(
+ extension=".btf",
+ priority=["tifffile"],
+ ),
+ FileExtension(
+ name="High Efficiency Image File Format",
+ extension=".heic",
+ priority=["pillow"],
+ ),
+ FileExtension(
+ name="AV1 Image File Format",
+ extension=".avif",
+ priority=["pillow"],
+ ),
+]
+extension_list.sort(key=lambda x: x.extension)
+
+
+known_extensions = dict()
+for ext in extension_list:
+ if ext.extension not in known_extensions:
+ known_extensions[ext.extension] = list()
+ known_extensions[ext.extension].append(ext)
+
+extension_list = [ext for ext_list in known_extensions.values() for ext in ext_list]
+
+_video_extension_strings = [
+ ".264",
+ ".265",
+ ".3g2",
+ ".3gp",
+ ".a64",
+ ".A64",
+ ".adp",
+ ".amr",
+ ".amv",
+ ".asf",
+ ".avc",
+ ".avi",
+ ".avr",
+ ".avs",
+ ".avs2",
+ ".avs3",
+ ".bmv",
+ ".cavs",
+ ".cdg",
+ ".cdxl",
+ ".cgi",
+ ".chk",
+ ".cif",
+ ".cpk",
+ ".dat",
+ ".dav",
+ ".dif",
+ ".dnxhd",
+ ".dnxhr",
+ ".drc",
+ ".dv",
+ ".dvd",
+ ".f4v",
+ ".flm",
+ ".flv",
+ ".gsm",
+ ".gxf",
+ ".h261",
+ ".h263",
+ ".h264",
+ ".h265",
+ ".h26l",
+ ".hevc",
+ ".idf",
+ ".ifv",
+ ".imx",
+ ".ipu",
+ ".ism",
+ ".isma",
+ ".ismv",
+ ".ivf",
+ ".ivr",
+ ".j2k",
+ ".kux",
+ ".lvf",
+ ".m1v",
+ ".m2t",
+ ".m2ts",
+ ".m2v",
+ ".m4a",
+ ".m4b",
+ ".m4v",
+ ".mj2",
+ ".mjpeg",
+ ".mjpg",
+ ".mk3d",
+ ".mka",
+ ".mks",
+ ".mkv",
+ ".mods",
+ ".moflex",
+ ".mov",
+ ".mp4",
+ ".mpc",
+ ".mpd",
+ ".mpeg",
+ ".mpg",
+ ".mpo",
+ ".mts",
+ ".mvi",
+ ".mxf",
+ ".mxg",
+ ".nut",
+ ".obu",
+ ".ogg",
+ ".ogv",
+ ".psp",
+ ".qcif",
+ ".rcv",
+ ".rgb",
+ ".rm",
+ ".roq",
+ ".sdr2",
+ ".ser",
+ ".sga",
+ ".svag",
+ ".svs",
+ ".ts",
+ ".ty",
+ ".ty+",
+ ".v",
+ ".v210",
+ ".vb",
+ ".vc1",
+ ".vc2",
+ ".viv",
+ ".vob",
+ ".webm",
+ ".wmv",
+ ".wtv",
+ ".xl",
+ ".xmv",
+ ".y4m",
+ ".yop",
+ ".yuv",
+ ".yuv10",
+]
+video_extensions = list()
+for ext_string in _video_extension_strings:
+ formats = known_extensions[ext_string]
+ video_extensions.append(formats[0])
+video_extensions.sort(key=lambda x: x.extension)
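+
+# Usage sketch (illustrative, not part of the module's public API): find the
+# plugins that can handle a given extension, in priority order.
+#
+#     candidates = known_extensions[".mp4"]  # list of FileExtension objects
+#     plugin_names = [p for c in candidates for p in c.priority]
+#     # the first name in plugin_names is tried first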
diff --git a/Lib/site-packages/imageio/config/extensions.pyi b/Lib/site-packages/imageio/config/extensions.pyi
new file mode 100644
index 0000000..266d063
--- /dev/null
+++ b/Lib/site-packages/imageio/config/extensions.pyi
@@ -0,0 +1,24 @@
+from typing import List, Dict, Optional
+
+class FileExtension:
+ extension: str
+ priority: List[str]
+ name: Optional[str] = None
+ description: Optional[str] = None
+ external_link: Optional[str] = None
+ volume_support: bool
+
+ def __init__(
+ self,
+ *,
+ extension: str,
+ priority: List[str],
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        external_link: Optional[str] = None
+ ) -> None: ...
+ def reset(self) -> None: ...
+
+extension_list: List[FileExtension]
+known_extensions: Dict[str, List[FileExtension]]
+video_extensions: List[FileExtension]
diff --git a/Lib/site-packages/imageio/config/plugins.py b/Lib/site-packages/imageio/config/plugins.py
new file mode 100644
index 0000000..e55303a
--- /dev/null
+++ b/Lib/site-packages/imageio/config/plugins.py
@@ -0,0 +1,780 @@
+import importlib
+
+from ..core.legacy_plugin_wrapper import LegacyPlugin
+
+
+class PluginConfig:
+ """Plugin Configuration Metadata
+
+ This class holds the information needed to lazy-import plugins.
+
+ Parameters
+ ----------
+ name : str
+ The name of the plugin.
+ class_name : str
+ The name of the plugin class inside the plugin module.
+ module_name : str
+ The name of the module/package from which to import the plugin.
+ is_legacy : bool
+ If True, this plugin is a v2 plugin and will be wrapped in a
+ LegacyPlugin. Default: False.
+ package_name : str
+ If the given module name points to a relative module, then the package
+ name determines the package it is relative to.
+ install_name : str
+ The name of the optional dependency that can be used to install this
+ plugin if it is missing.
+ legacy_args : Dict
+ A dictionary of kwargs to pass to the v2 plugin (Format) upon construction.
+
+ Examples
+ --------
+ >>> PluginConfig(
+ name="TIFF",
+ class_name="TiffFormat",
+ module_name="imageio.plugins.tifffile",
+ is_legacy=True,
+ install_name="tifffile",
+ legacy_args={
+ "description": "TIFF format",
+ "extensions": ".tif .tiff .stk .lsm",
+ "modes": "iIvV",
+ },
+ )
+ >>> PluginConfig(
+ name="pillow",
+ class_name="PillowPlugin",
+ module_name="imageio.plugins.pillow"
+ )
+
+ """
+
+ def __init__(
+ self,
+ name,
+ class_name,
+ module_name,
+ *,
+ is_legacy=False,
+ package_name=None,
+ install_name=None,
+ legacy_args=None,
+ ):
+ legacy_args = legacy_args or dict()
+
+ self.name = name
+ self.class_name = class_name
+ self.module_name = module_name
+ self.package_name = package_name
+
+ self.is_legacy = is_legacy
+ self.install_name = install_name or self.name
+ self.legacy_args = {"name": name, "description": "A legacy plugin"}
+ self.legacy_args.update(legacy_args)
+
+ @property
+ def format(self):
+ """For backwards compatibility with FormatManager
+
+ Delete when migrating to v3
+ """
+ if not self.is_legacy:
+ raise RuntimeError("Can only get format for legacy plugins.")
+
+ module = importlib.import_module(self.module_name, self.package_name)
+ clazz = getattr(module, self.class_name)
+ return clazz(**self.legacy_args)
+
+ @property
+ def plugin_class(self):
+ """Get the plugin class (import if needed)
+
+ Returns
+ -------
+ plugin_class : Any
+ The class that can be used to instantiate plugins.
+
+ """
+
+ module = importlib.import_module(self.module_name, self.package_name)
+ clazz = getattr(module, self.class_name)
+
+ if self.is_legacy:
+ legacy_plugin = clazz(**self.legacy_args)
+
+ def partial_legacy_plugin(request):
+ return LegacyPlugin(request, legacy_plugin)
+
+ clazz = partial_legacy_plugin
+
+ return clazz
+
+
+known_plugins = dict()
+known_plugins["pillow"] = PluginConfig(
+ name="pillow", class_name="PillowPlugin", module_name="imageio.plugins.pillow"
+)
+known_plugins["pyav"] = PluginConfig(
+ name="pyav", class_name="PyAVPlugin", module_name="imageio.plugins.pyav"
+)
+known_plugins["opencv"] = PluginConfig(
+ name="opencv", class_name="OpenCVPlugin", module_name="imageio.plugins.opencv"
+)
+known_plugins["tifffile"] = PluginConfig(
+ name="tifffile",
+ class_name="TifffilePlugin",
+ module_name="imageio.plugins.tifffile_v3",
+)
+known_plugins["SPE"] = PluginConfig(
+ name="spe", class_name="SpePlugin", module_name="imageio.plugins.spe"
+)
+
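+# Usage sketch (illustrative): plugin classes are imported lazily through
+# their config entry; nothing is imported until ``plugin_class`` is accessed.
+#
+#     config = known_plugins["pillow"]
+#     PluginClass = config.plugin_class  # imports imageio.plugins.pillow here
+#     # reader = PluginClass(request)    # ``request`` is an imageio Request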
+
+# Legacy plugins
+# ==============
+#
+# Which are partly registered by format, partly by plugin, and partly by a mix
+# of both. We keep the naming here for backwards compatibility.
+# In v3 this should become a single entry per plugin named after the plugin
+# We can choose extension-specific priority in ``config.extensions``.
+#
+# Note: since Python 3.7, insertion order determines the order of dict().keys().
+# This means that the order here determines the order by which plugins are
+# checked during the full fallback search. We don't advertise this downstream,
+# but it could be a useful thing to keep in mind to choose a sensible default
+# search order.
+
+known_plugins["TIFF"] = PluginConfig(
+ name="TIFF",
+ class_name="TiffFormat",
+ module_name="imageio.plugins.tifffile",
+ is_legacy=True,
+ install_name="tifffile",
+ legacy_args={
+ "description": "TIFF format",
+ "extensions": ".tif .tiff .stk .lsm",
+ "modes": "iIvV",
+ },
+)
+
+# PILLOW plugin formats (legacy)
+PILLOW_FORMATS = [
+ ("BMP", "Windows Bitmap", ".bmp", "PillowFormat"),
+ ("BUFR", "BUFR", ".bufr", "PillowFormat"),
+ ("CUR", "Windows Cursor", ".cur", "PillowFormat"),
+ ("DCX", "Intel DCX", ".dcx", "PillowFormat"),
+ ("DDS", "DirectDraw Surface", ".dds", "PillowFormat"),
+ ("DIB", "Windows Bitmap", "", "PillowFormat"),
+ ("EPS", "Encapsulated Postscript", ".ps .eps", "PillowFormat"),
+ ("FITS", "FITS", ".fit .fits", "PillowFormat"),
+ ("FLI", "Autodesk FLI/FLC Animation", ".fli .flc", "PillowFormat"),
+ ("FPX", "FlashPix", ".fpx", "PillowFormat"),
+ ("FTEX", "Texture File Format (IW2:EOC)", ".ftc .ftu", "PillowFormat"),
+ ("GBR", "GIMP brush file", ".gbr", "PillowFormat"),
+ ("GIF", "Compuserve GIF", ".gif", "GIFFormat"),
+ ("GRIB", "GRIB", ".grib", "PillowFormat"),
+ ("HDF5", "HDF5", ".h5 .hdf", "PillowFormat"),
+ ("ICNS", "Mac OS icns resource", ".icns", "PillowFormat"),
+ ("ICO", "Windows Icon", ".ico", "PillowFormat"),
+ ("IM", "IFUNC Image Memory", ".im", "PillowFormat"),
+ ("IMT", "IM Tools", "", "PillowFormat"),
+ ("IPTC", "IPTC/NAA", ".iim", "PillowFormat"),
+ ("JPEG", "JPEG (ISO 10918)", ".jfif .jpe .jpg .jpeg", "JPEGFormat"),
+ (
+ "JPEG2000",
+ "JPEG 2000 (ISO 15444)",
+ ".jp2 .j2k .jpc .jpf .jpx .j2c",
+ "JPEG2000Format",
+ ),
+ ("MCIDAS", "McIdas area file", "", "PillowFormat"),
+ ("MIC", "Microsoft Image Composer", ".mic", "PillowFormat"),
+ # skipped in legacy pillow
+ # ("MPEG", "MPEG", ".mpg .mpeg", "PillowFormat"),
+ ("MPO", "MPO (CIPA DC-007)", ".mpo", "PillowFormat"),
+ ("MSP", "Windows Paint", ".msp", "PillowFormat"),
+ ("PCD", "Kodak PhotoCD", ".pcd", "PillowFormat"),
+ ("PCX", "Paintbrush", ".pcx", "PillowFormat"),
+ ("PIXAR", "PIXAR raster image", ".pxr", "PillowFormat"),
+ ("PNG", "Portable network graphics", ".png", "PNGFormat"),
+ ("PPM", "Pbmplus image", ".pbm .pgm .ppm", "PillowFormat"),
+ ("PSD", "Adobe Photoshop", ".psd", "PillowFormat"),
+ ("SGI", "SGI Image File Format", ".bw .rgb .rgba .sgi", "PillowFormat"),
+ ("SPIDER", "Spider 2D image", "", "PillowFormat"),
+ ("SUN", "Sun Raster File", ".ras", "PillowFormat"),
+ ("TGA", "Targa", ".tga", "PillowFormat"),
+ ("TIFF", "Adobe TIFF", ".tif .tiff", "TIFFFormat"),
+ ("WMF", "Windows Metafile", ".wmf .emf", "PillowFormat"),
+ ("XBM", "X11 Bitmap", ".xbm", "PillowFormat"),
+ ("XPM", "X11 Pixel Map", ".xpm", "PillowFormat"),
+ ("XVTHUMB", "XV thumbnail image", "", "PillowFormat"),
+]
+for id, summary, ext, class_name in PILLOW_FORMATS:
+ config = PluginConfig(
+ name=id.upper() + "-PIL",
+ class_name=class_name,
+ module_name="imageio.plugins.pillow_legacy",
+ is_legacy=True,
+ install_name="pillow",
+ legacy_args={
+ "description": summary + " via Pillow",
+ "extensions": ext,
+ "modes": "iI" if class_name == "GIFFormat" else "i",
+ "plugin_id": id,
+ },
+ )
+ known_plugins[config.name] = config
+
+known_plugins["FFMPEG"] = PluginConfig(
+ name="FFMPEG",
+ class_name="FfmpegFormat",
+ module_name="imageio.plugins.ffmpeg",
+ is_legacy=True,
+ install_name="ffmpeg",
+ legacy_args={
+ "description": "Many video formats and cameras (via ffmpeg)",
+ "extensions": ".mov .avi .mpg .mpeg .mp4 .mkv .webm .wmv .h264",
+ "modes": "I",
+ },
+)
+
+known_plugins["BSDF"] = PluginConfig(
+ name="BSDF",
+ class_name="BsdfFormat",
+ module_name="imageio.plugins.bsdf",
+ is_legacy=True,
+ install_name="bsdf",
+ legacy_args={
+ "description": "Format based on the Binary Structured Data Format",
+ "extensions": ".bsdf",
+ "modes": "iIvV",
+ },
+)
+
+known_plugins["DICOM"] = PluginConfig(
+ name="DICOM",
+ class_name="DicomFormat",
+ module_name="imageio.plugins.dicom",
+ is_legacy=True,
+ install_name="dicom",
+ legacy_args={
+ "description": "Digital Imaging and Communications in Medicine",
+ "extensions": ".dcm .ct .mri",
+ "modes": "iIvV",
+ },
+)
+
+known_plugins["FEI"] = PluginConfig(
+ name="FEI",
+ class_name="FEISEMFormat",
+ module_name="imageio.plugins.feisem",
+ is_legacy=True,
+ install_name="feisem",
+ legacy_args={
+ "description": "FEI-SEM TIFF format",
+ "extensions": [".tif", ".tiff"],
+ "modes": "iv",
+ },
+)
+
+known_plugins["FITS"] = PluginConfig(
+ name="FITS",
+ class_name="FitsFormat",
+ module_name="imageio.plugins.fits",
+ is_legacy=True,
+ install_name="fits",
+ legacy_args={
+ "description": "Flexible Image Transport System (FITS) format",
+ "extensions": ".fits .fit .fts .fz",
+ "modes": "iIvV",
+ },
+)
+
+known_plugins["GDAL"] = PluginConfig(
+ name="GDAL",
+ class_name="GdalFormat",
+ module_name="imageio.plugins.gdal",
+ is_legacy=True,
+ install_name="gdal",
+ legacy_args={
+ "description": "Geospatial Data Abstraction Library",
+ "extensions": ".tiff .tif .img .ecw .jpg .jpeg",
+ "modes": "iIvV",
+ },
+)
+
+known_plugins["ITK"] = PluginConfig(
+ name="ITK",
+ class_name="ItkFormat",
+ module_name="imageio.plugins.simpleitk",
+ is_legacy=True,
+ install_name="simpleitk",
+ legacy_args={
+ "description": "Insight Segmentation and Registration Toolkit (ITK) format",
+ "extensions": " ".join(
+ (
+ ".gipl",
+ ".ipl",
+ ".mha",
+ ".mhd",
+ ".nhdr",
+ ".nia",
+ ".hdr",
+ ".nrrd",
+ ".nii",
+ ".nii.gz",
+ ".img",
+ ".img.gz",
+ ".vtk",
+ ".hdf5",
+ ".lsm",
+ ".mnc",
+ ".mnc2",
+ ".mgh",
+ ".mnc",
+ ".pic",
+ ".bmp",
+ ".jpeg",
+ ".jpg",
+ ".png",
+ ".tiff",
+ ".tif",
+ ".dicom",
+ ".dcm",
+ ".gdcm",
+ )
+ ),
+ "modes": "iIvV",
+ },
+)
+
+known_plugins["NPZ"] = PluginConfig(
+ name="NPZ",
+ class_name="NpzFormat",
+ module_name="imageio.plugins.npz",
+ is_legacy=True,
+ install_name="numpy",
+ legacy_args={
+ "description": "Numpy's compressed array format",
+ "extensions": ".npz",
+ "modes": "iIvV",
+ },
+)
+
+known_plugins["SWF"] = PluginConfig(
+ name="SWF",
+ class_name="SWFFormat",
+ module_name="imageio.plugins.swf",
+ is_legacy=True,
+ install_name="swf",
+ legacy_args={
+ "description": "Shockwave flash",
+ "extensions": ".swf",
+ "modes": "I",
+ },
+)
+
+known_plugins["SCREENGRAB"] = PluginConfig(
+ name="SCREENGRAB",
+ class_name="ScreenGrabFormat",
+ module_name="imageio.plugins.grab",
+ is_legacy=True,
+ install_name="pillow",
+ legacy_args={
+ "description": "Grab screenshots (Windows and OS X only)",
+ "extensions": [],
+ "modes": "i",
+ },
+)
+
+known_plugins["CLIPBOARDGRAB"] = PluginConfig(
+ name="CLIPBOARDGRAB",
+ class_name="ClipboardGrabFormat",
+ module_name="imageio.plugins.grab",
+ is_legacy=True,
+ install_name="pillow",
+ legacy_args={
+ "description": "Grab from clipboard (Windows only)",
+ "extensions": [],
+ "modes": "i",
+ },
+)
+
+# LYTRO plugin (legacy)
+lytro_formats = [
+ ("lytro-lfr", "Lytro Illum lfr image file", ".lfr", "i", "LytroLfrFormat"),
+ (
+ "lytro-illum-raw",
+ "Lytro Illum raw image file",
+ ".raw",
+ "i",
+ "LytroIllumRawFormat",
+ ),
+ ("lytro-lfp", "Lytro F01 lfp image file", ".lfp", "i", "LytroLfpFormat"),
+ ("lytro-f01-raw", "Lytro F01 raw image file", ".raw", "i", "LytroF01RawFormat"),
+]
+for name, des, ext, mode, class_name in lytro_formats:
+ config = PluginConfig(
+ name=name.upper(),
+ class_name=class_name,
+ module_name="imageio.plugins.lytro",
+ is_legacy=True,
+ install_name="lytro",
+ legacy_args={
+ "description": des,
+ "extensions": ext,
+ "modes": mode,
+ },
+ )
+ known_plugins[config.name] = config
+
+# FreeImage plugin (legacy)
+FREEIMAGE_FORMATS = [
+ (
+ "BMP",
+ 0,
+ "Windows or OS/2 Bitmap",
+ ".bmp",
+ "i",
+ "FreeimageBmpFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "CUT",
+ 21,
+ "Dr. Halo",
+ ".cut",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "DDS",
+ 24,
+ "DirectX Surface",
+ ".dds",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "EXR",
+ 29,
+ "ILM OpenEXR",
+ ".exr",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "G3",
+ 27,
+ "Raw fax format CCITT G.3",
+ ".g3",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "GIF",
+ 25,
+ "Static and animated gif (FreeImage)",
+ ".gif",
+ "iI",
+ "GifFormat",
+ "imageio.plugins.freeimagemulti",
+ ),
+ (
+ "HDR",
+ 26,
+ "High Dynamic Range Image",
+ ".hdr",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "ICO",
+ 1,
+ "Windows Icon",
+ ".ico",
+ "iI",
+ "IcoFormat",
+ "imageio.plugins.freeimagemulti",
+ ),
+ (
+ "IFF",
+ 5,
+ "IFF Interleaved Bitmap",
+ ".iff .lbm",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "J2K",
+ 30,
+ "JPEG-2000 codestream",
+ ".j2k .j2c",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "JNG",
+ 3,
+ "JPEG Network Graphics",
+ ".jng",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "JP2",
+ 31,
+ "JPEG-2000 File Format",
+ ".jp2",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "JPEG",
+ 2,
+ "JPEG - JFIF Compliant",
+ ".jpg .jif .jpeg .jpe",
+ "i",
+ "FreeimageJpegFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "JPEG-XR",
+ 36,
+ "JPEG XR image format",
+ ".jxr .wdp .hdp",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "KOALA",
+ 4,
+ "C64 Koala Graphics",
+ ".koa",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ # not registered in legacy pillow
+ # ("MNG", 6, "Multiple-image Network Graphics", ".mng", "i", "FreeimageFormat", "imageio.plugins.freeimage"),
+ (
+ "PBM",
+ 7,
+ "Portable Bitmap (ASCII)",
+ ".pbm",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "PBMRAW",
+ 8,
+ "Portable Bitmap (RAW)",
+ ".pbm",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "PCD",
+ 9,
+ "Kodak PhotoCD",
+ ".pcd",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "PCX",
+ 10,
+ "Zsoft Paintbrush",
+ ".pcx",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "PFM",
+ 32,
+ "Portable floatmap",
+ ".pfm",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "PGM",
+ 11,
+ "Portable Greymap (ASCII)",
+ ".pgm",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "PGMRAW",
+ 12,
+ "Portable Greymap (RAW)",
+ ".pgm",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "PICT",
+ 33,
+ "Macintosh PICT",
+ ".pct .pict .pic",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "PNG",
+ 13,
+ "Portable Network Graphics",
+ ".png",
+ "i",
+ "FreeimagePngFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "PPM",
+ 14,
+ "Portable Pixelmap (ASCII)",
+ ".ppm",
+ "i",
+ "FreeimagePnmFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "PPMRAW",
+ 15,
+ "Portable Pixelmap (RAW)",
+ ".ppm",
+ "i",
+ "FreeimagePnmFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "PSD",
+ 20,
+ "Adobe Photoshop",
+ ".psd",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "RAS",
+ 16,
+ "Sun Raster Image",
+ ".ras",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "RAW",
+ 34,
+ "RAW camera image",
+ ".3fr .arw .bay .bmq .cap .cine .cr2 .crw .cs1 .dc2 "
+ ".dcr .drf .dsc .dng .erf .fff .ia .iiq .k25 .kc2 .kdc .mdc .mef .mos .mrw .nef .nrw .orf "
+ ".pef .ptx .pxn .qtk .raf .raw .rdc .rw2 .rwl .rwz .sr2 .srf .srw .sti",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "SGI",
+ 28,
+ "SGI Image Format",
+ ".sgi .rgb .rgba .bw",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "TARGA",
+ 17,
+ "Truevision Targa",
+ ".tga .targa",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "TIFF",
+ 18,
+ "Tagged Image File Format",
+ ".tif .tiff",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "WBMP",
+ 19,
+ "Wireless Bitmap",
+ ".wap .wbmp .wbm",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "WebP",
+ 35,
+ "Google WebP image format",
+ ".webp",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "XBM",
+ 22,
+ "X11 Bitmap Format",
+ ".xbm",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+ (
+ "XPM",
+ 23,
+ "X11 Pixmap Format",
+ ".xpm",
+ "i",
+ "FreeimageFormat",
+ "imageio.plugins.freeimage",
+ ),
+]
+for name, i, des, ext, mode, class_name, module_name in FREEIMAGE_FORMATS:
+ config = PluginConfig(
+ name=name.upper() + "-FI",
+ class_name=class_name,
+ module_name=module_name,
+ is_legacy=True,
+ install_name="freeimage",
+ legacy_args={
+ "description": des,
+ "extensions": ext,
+ "modes": mode,
+ "fif": i,
+ },
+ )
+ known_plugins[config.name] = config
+
+# exists for backwards compatibility with FormatManager
+# delete in V3
+_original_order = [x for x, config in known_plugins.items() if config.is_legacy]
diff --git a/Lib/site-packages/imageio/config/plugins.pyi b/Lib/site-packages/imageio/config/plugins.pyi
new file mode 100644
index 0000000..ab5d4a8
--- /dev/null
+++ b/Lib/site-packages/imageio/config/plugins.pyi
@@ -0,0 +1,28 @@
+from typing import Any, Dict, Optional
+from ..core.v3_plugin_api import PluginV3
+
+class PluginConfig:
+ name: str
+ class_name: str
+ module_name: str
+ is_legacy: bool
+ package_name: Optional[str] = None
+ install_name: Optional[str] = None
+ legacy_args: Optional[dict] = None
+ @property
+ def format(self) -> Any: ...
+ @property
+ def plugin_class(self) -> PluginV3: ...
+ def __init__(
+ self,
+ name: str,
+ class_name: str,
+ module_name: str,
+ *,
+ is_legacy: bool = False,
+        package_name: Optional[str] = None,
+        install_name: Optional[str] = None,
+        legacy_args: Optional[dict] = None,
+ ) -> None: ...
+
+known_plugins: Dict[str, PluginConfig]
diff --git a/Lib/site-packages/imageio/core/__init__.py b/Lib/site-packages/imageio/core/__init__.py
new file mode 100644
index 0000000..80bedab
--- /dev/null
+++ b/Lib/site-packages/imageio/core/__init__.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+# Distributed under the (new) BSD License. See LICENSE.txt for more info.
+
+""" This subpackage provides the core functionality of imageio
+(everything but the plugins).
+"""
+
+# flake8: noqa
+
+from .util import Image, Array, Dict, asarray, image_as_uint, urlopen
+from .util import BaseProgressIndicator, StdoutProgressIndicator, IS_PYPY
+from .util import get_platform, appdata_dir, resource_dirs, has_module
+from .findlib import load_lib
+from .fetching import get_remote_file, InternetNotAllowedError, NeedDownloadError
+from .request import Request, read_n_bytes, RETURN_BYTES
+from .format import Format, FormatManager
diff --git a/Lib/site-packages/imageio/core/fetching.py b/Lib/site-packages/imageio/core/fetching.py
new file mode 100644
index 0000000..0380bc7
--- /dev/null
+++ b/Lib/site-packages/imageio/core/fetching.py
@@ -0,0 +1,247 @@
+# -*- coding: utf-8 -*-
+# Based on code from the vispy project
+# Distributed under the (new) BSD License. See LICENSE.txt for more info.
+
+"""Data downloading and reading functions
+"""
+
+from math import log
+import os
+from os import path as op
+import sys
+import shutil
+import time
+
+from . import appdata_dir, resource_dirs
+from . import StdoutProgressIndicator, urlopen
+
+
+class InternetNotAllowedError(IOError):
+ """Plugins that need resources can just use get_remote_file(), but
+ should catch this error and silently ignore it.
+ """
+
+ pass
+
+
+class NeedDownloadError(IOError):
+    """Raised when a remote file is requested that is not locally
+    available and needs to be explicitly downloaded by the user.
+ """
+
+
+def get_remote_file(fname, directory=None, force_download=False, auto=True):
+    """Get the filename for the local version of a file from the web
+
+ Parameters
+ ----------
+ fname : str
+ The relative filename on the remote data repository to download.
+ These correspond to paths on
+ ``https://github.com/imageio/imageio-binaries/``.
+ directory : str | None
+ The directory where the file will be cached if a download was
+ required to obtain the file. By default, the appdata directory
+ is used. This is also the first directory that is checked for
+ a local version of the file. If the directory does not exist,
+ it will be created.
+ force_download : bool | str
+ If True, the file will be downloaded even if a local copy exists
+ (and this copy will be overwritten). Can also be a YYYY-MM-DD date
+ to ensure a file is up-to-date (modified date of a file on disk,
+ if present, is checked).
+ auto : bool
+        Whether to auto-download the file if it's not present locally. Default
+ True. If False and a download is needed, raises NeedDownloadError.
+
+ Returns
+ -------
+ fname : str
+ The path to the file on the local system.
+ """
+ _url_root = "https://github.com/imageio/imageio-binaries/raw/master/"
+ url = _url_root + fname
+ nfname = op.normcase(fname) # convert to native
+ # Get dirs to look for the resource
+ given_directory = directory
+ directory = given_directory or appdata_dir("imageio")
+ dirs = resource_dirs()
+ dirs.insert(0, directory) # Given dir has preference
+ # Try to find the resource locally
+ for dir in dirs:
+ filename = op.join(dir, nfname)
+ if op.isfile(filename):
+ if not force_download: # we're done
+ if given_directory and given_directory != dir:
+ filename2 = os.path.join(given_directory, nfname)
+ # Make sure the output directory exists
+ if not op.isdir(op.dirname(filename2)):
+ os.makedirs(op.abspath(op.dirname(filename2)))
+ shutil.copy(filename, filename2)
+ return filename2
+ return filename
+ if isinstance(force_download, str):
+ ntime = time.strptime(force_download, "%Y-%m-%d")
+ ftime = time.gmtime(op.getctime(filename))
+ if ftime >= ntime:
+ if given_directory and given_directory != dir:
+ filename2 = os.path.join(given_directory, nfname)
+ # Make sure the output directory exists
+ if not op.isdir(op.dirname(filename2)):
+ os.makedirs(op.abspath(op.dirname(filename2)))
+ shutil.copy(filename, filename2)
+ return filename2
+ return filename
+ else:
+ print("File older than %s, updating..." % force_download)
+ break
+
+ # If we get here, we're going to try to download the file
+ if os.getenv("IMAGEIO_NO_INTERNET", "").lower() in ("1", "true", "yes"):
+ raise InternetNotAllowedError(
+ "Will not download resource from the "
+ "internet because environment variable "
+ "IMAGEIO_NO_INTERNET is set."
+ )
+
+ # Can we proceed with auto-download?
+ if not auto:
+ raise NeedDownloadError()
+
+ # Get filename to store to and make sure the dir exists
+ filename = op.join(directory, nfname)
+ if not op.isdir(op.dirname(filename)):
+ os.makedirs(op.abspath(op.dirname(filename)))
+ # let's go get the file
+ if os.getenv("CONTINUOUS_INTEGRATION", False): # pragma: no cover
+ # On CI, we retry a few times ...
+ for i in range(2):
+ try:
+ _fetch_file(url, filename)
+ return filename
+ except IOError:
+ time.sleep(0.5)
+ else:
+ _fetch_file(url, filename)
+ return filename
+ else: # pragma: no cover
+ _fetch_file(url, filename)
+ return filename
+
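+# Usage sketch: fetch (and cache) a resource from the imageio-binaries
+# repository; "images/chelsea.png" is an example path and must exist in that
+# repository for the call to succeed.
+#
+#     local_path = get_remote_file("images/chelsea.png")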
+
+def _fetch_file(url, file_name, print_destination=True):
+ """Load requested file, downloading it if needed or requested
+
+ Parameters
+ ----------
+ url: string
+ The url of file to be downloaded.
+ file_name: string
+        Name, along with the path, of where the downloaded file will be saved.
+    print_destination: bool, optional
+        If True, the destination where the file was saved will be printed
+        after the download finishes.
+ """
+ # Adapted from NISL:
+ # https://github.com/nisl/tutorial/blob/master/nisl/datasets.py
+
+ print(
+ "Imageio: %r was not found on your computer; "
+ "downloading it now." % os.path.basename(file_name)
+ )
+
+ temp_file_name = file_name + ".part"
+ local_file = None
+ initial_size = 0
+ errors = []
+ for tries in range(4):
+ try:
+ # Checking file size and displaying it alongside the download url
+ remote_file = urlopen(url, timeout=5.0)
+ file_size = int(remote_file.headers["Content-Length"].strip())
+ size_str = _sizeof_fmt(file_size)
+ print("Try %i. Download from %s (%s)" % (tries + 1, url, size_str))
+ # Downloading data (can be extended to resume if need be)
+ local_file = open(temp_file_name, "wb")
+ _chunk_read(remote_file, local_file, initial_size=initial_size)
+ # temp file must be closed prior to the move
+ if not local_file.closed:
+ local_file.close()
+ shutil.move(temp_file_name, file_name)
+ if print_destination is True:
+ sys.stdout.write("File saved as %s.\n" % file_name)
+ break
+ except Exception as e:
+ errors.append(e)
+ print("Error while fetching file: %s." % str(e))
+ finally:
+ if local_file is not None:
+ if not local_file.closed:
+ local_file.close()
+ else:
+ raise IOError(
+ "Unable to download %r. Perhaps there is no internet "
+ "connection? If there is, please report this problem."
+ % os.path.basename(file_name)
+ )
+
+
+def _chunk_read(response, local_file, chunk_size=8192, initial_size=0):
+    """Download a file chunk by chunk and show progress
+
+ Can also be used when resuming downloads over http.
+
+ Parameters
+ ----------
+ response: urllib.response.addinfourl
+ Response to the download request in order to get file size.
+ local_file: file
+ Hard disk file where data should be written.
+ chunk_size: integer, optional
+ Size of downloaded chunks. Default: 8192
+ initial_size: int, optional
+ If resuming, indicate the initial size of the file.
+ """
+ # Adapted from NISL:
+ # https://github.com/nisl/tutorial/blob/master/nisl/datasets.py
+
+ bytes_so_far = initial_size
+ # Returns only amount left to download when resuming, not the size of the
+ # entire file
+ total_size = int(response.headers["Content-Length"].strip())
+ total_size += initial_size
+
+ progress = StdoutProgressIndicator("Downloading")
+ progress.start("", "bytes", total_size)
+
+ while True:
+ chunk = response.read(chunk_size)
+ bytes_so_far += len(chunk)
+ if not chunk:
+ break
+ _chunk_write(chunk, local_file, progress)
+ progress.finish("Done")
+
+
+def _chunk_write(chunk, local_file, progress):
+ """Write a chunk to file and update the progress bar"""
+ local_file.write(chunk)
+ progress.increase_progress(len(chunk))
+ time.sleep(0) # Give other threads a chance, e.g. those that handle stdout pipes
+
+
+def _sizeof_fmt(num):
+ """Turn number of bytes into human-readable str"""
+ units = ["bytes", "kB", "MB", "GB", "TB", "PB"]
+    decimals = [0, 0, 1, 2, 2, 2]
+ if num > 1:
+ exponent = min(int(log(num, 1024)), len(units) - 1)
+ quotient = float(num) / 1024**exponent
+ unit = units[exponent]
+ num_decimals = decimals[exponent]
+ format_string = "{0:.%sf} {1}" % num_decimals
+ return format_string.format(quotient, unit)
+ return "0 bytes" if num == 0 else "1 byte"
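+
+
+# Examples (sketch): _sizeof_fmt(2048) -> "2 kB"; _sizeof_fmt(5 * 1024**2)
+# -> "5.0 MB"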
diff --git a/Lib/site-packages/imageio/core/findlib.py b/Lib/site-packages/imageio/core/findlib.py
new file mode 100644
index 0000000..76bda52
--- /dev/null
+++ b/Lib/site-packages/imageio/core/findlib.py
@@ -0,0 +1,161 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2015-2018, imageio contributors
+# Copyright (C) 2013, Zach Pincus, Almar Klein and others
+
+""" This module contains generic code to find and load a dynamic library.
+"""
+
+import os
+import sys
+import ctypes
+
+
+LOCALDIR = os.path.abspath(os.path.dirname(__file__))
+
+# Flag that can be patched / set to True to disable loading non-system libs
+SYSTEM_LIBS_ONLY = False
+
+
+def looks_lib(fname):
+ """Returns True if the given filename looks like a dynamic library.
+ Based on extension, but cross-platform and more flexible.
+ """
+ fname = fname.lower()
+ if sys.platform.startswith("win"):
+ return fname.endswith(".dll")
+ elif sys.platform.startswith("darwin"):
+ return fname.endswith(".dylib")
+ else:
+ return fname.endswith(".so") or ".so." in fname
+
+
+def generate_candidate_libs(lib_names, lib_dirs=None):
+ """Generate a list of candidate filenames of what might be the dynamic
+ library corresponding with the given list of names.
+ Returns (lib_dirs, lib_paths)
+ """
+ lib_dirs = lib_dirs or []
+
+ # Get system dirs to search
+ sys_lib_dirs = [
+ "/lib",
+ "/usr/lib",
+ "/usr/lib/x86_64-linux-gnu",
+ "/usr/lib/aarch64-linux-gnu",
+ "/usr/local/lib",
+ "/opt/local/lib",
+ ]
+
+    # Get Python dirs to search ("shared" is for Pyzo)
+ py_sub_dirs = ["bin", "lib", "DLLs", "Library/bin", "shared"]
+ py_lib_dirs = [os.path.join(sys.prefix, d) for d in py_sub_dirs]
+ if hasattr(sys, "base_prefix"):
+ py_lib_dirs += [os.path.join(sys.base_prefix, d) for d in py_sub_dirs]
+
+ # Get user dirs to search (i.e. HOME)
+ home_dir = os.path.expanduser("~")
+ user_lib_dirs = [os.path.join(home_dir, d) for d in ["lib"]]
+
+ # Select only the dirs for which a directory exists, and remove duplicates
+ potential_lib_dirs = lib_dirs + sys_lib_dirs + py_lib_dirs + user_lib_dirs
+ lib_dirs = []
+ for ld in potential_lib_dirs:
+ if os.path.isdir(ld) and ld not in lib_dirs:
+ lib_dirs.append(ld)
+
+ # Now attempt to find libraries of that name in the given directory
+ # (case-insensitive)
+ lib_paths = []
+ for lib_dir in lib_dirs:
+ # Get files, prefer short names, last version
+ files = os.listdir(lib_dir)
+ files = reversed(sorted(files))
+ files = sorted(files, key=len)
+ for lib_name in lib_names:
+ # Test all filenames for name and ext
+ for fname in files:
+ if fname.lower().startswith(lib_name) and looks_lib(fname):
+ lib_paths.append(os.path.join(lib_dir, fname))
+
+ # Return (only the items which are files)
+ lib_paths = [lp for lp in lib_paths if os.path.isfile(lp)]
+ return lib_dirs, lib_paths
+
+
+def load_lib(exact_lib_names, lib_names, lib_dirs=None):
+ """load_lib(exact_lib_names, lib_names, lib_dirs=None)
+
+ Load a dynamic library.
+
+ This function first tries to load the library from the given exact
+ names. When that fails, it tries to find the library in common
+ locations. It searches for files that start with one of the names
+ given in lib_names (case insensitive). The search is performed in
+ the given lib_dirs and a set of common library dirs.
+
+ Returns ``(ctypes_library, library_path)``
+ """
+
+ # Checks
+ assert isinstance(exact_lib_names, list)
+ assert isinstance(lib_names, list)
+ if lib_dirs is not None:
+ assert isinstance(lib_dirs, list)
+ exact_lib_names = [n for n in exact_lib_names if n]
+ lib_names = [n for n in lib_names if n]
+
+ # Get reference name (for better messages)
+ if lib_names:
+ the_lib_name = lib_names[0]
+ elif exact_lib_names:
+ the_lib_name = exact_lib_names[0]
+ else:
+ raise ValueError("No library name given.")
+
+ # Collect filenames of potential libraries
+ # First try a few bare library names that ctypes might be able to find
+ # in the default locations for each platform.
+ if SYSTEM_LIBS_ONLY:
+ lib_dirs, lib_paths = [], []
+ else:
+ lib_dirs, lib_paths = generate_candidate_libs(lib_names, lib_dirs)
+ lib_paths = exact_lib_names + lib_paths
+
+ # Select loader
+ if sys.platform.startswith("win"):
+ loader = ctypes.windll
+ else:
+ loader = ctypes.cdll
+
+ # Try to load until success
+ the_lib = None
+ errors = []
+ for fname in lib_paths:
+ try:
+ the_lib = loader.LoadLibrary(fname)
+ break
+ except Exception as err:
+ # Don't record errors when it couldn't load the library from an
+ # exact name -- this fails often, and doesn't provide any useful
+ # debugging information anyway, beyond "couldn't find library..."
+ if fname not in exact_lib_names:
+ errors.append((fname, err))
+
+ # No success ...
+ if the_lib is None:
+ if errors:
+ # No library loaded, and load-errors reported for some
+ # candidate libs
+ err_txt = ["%s:\n%s" % (lib, str(e)) for lib, e in errors]
+ msg = (
+ "One or more %s libraries were found, but "
+ + "could not be loaded due to the following errors:\n%s"
+ )
+ raise OSError(msg % (the_lib_name, "\n\n".join(err_txt)))
+ else:
+ # No errors, because no potential libraries found at all!
+ msg = "Could not find a %s library in any of:\n%s"
+ raise OSError(msg % (the_lib_name, "\n".join(lib_dirs)))
+
+ # Done
+ return the_lib, fname
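+
+
+# Usage sketch (hypothetical library name): exact filenames are tried first,
+# then case-insensitive prefix matches in common library directories.
+#
+#     lib, path = load_lib([], ["freeimage"])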
diff --git a/Lib/site-packages/imageio/core/format.py b/Lib/site-packages/imageio/core/format.py
new file mode 100644
index 0000000..109cd8e
--- /dev/null
+++ b/Lib/site-packages/imageio/core/format.py
@@ -0,0 +1,881 @@
+# -*- coding: utf-8 -*-
+# imageio is distributed under the terms of the (new) BSD License.
+
+"""
+
+.. note::
+ imageio is under construction, some details with regard to the
+ Reader and Writer classes may change.
+
+These are the main classes of imageio. They expose an interface for
+advanced users and plugin developers. A brief overview:
+
+ * imageio.FormatManager - for keeping track of registered formats.
+ * imageio.Format - representation of a file format reader/writer
+ * imageio.Format.Reader - object used during the reading of a file.
+ * imageio.Format.Writer - object used during saving a file.
+ * imageio.Request - used to store the filename and other info.
+
+Plugins need to implement a Format class and register
+a format object using ``imageio.formats.add_format()``.
+
+"""
+
+# todo: do we even use the known extensions?
+
+# Some notes:
+#
+# The classes in this module use the Request object to pass filename and
+# related info around. This request object is instantiated in
+# imageio.get_reader and imageio.get_writer.
+
+import sys
+import warnings
+import contextlib
+
+import numpy as np
+from pathlib import Path
+
+from . import Array, asarray
+from .request import ImageMode
+from ..config import known_plugins, known_extensions, PluginConfig, FileExtension
+from ..config.plugins import _original_order
+from .imopen import imopen
+
+
+# Kept for backwards compatibility: external plugin code may still depend on
+# this existing; imageio itself no longer uses it.
+MODENAMES = ImageMode
+
+
+def _get_config(plugin):
+ """Old Plugin resolution logic.
+
+ Remove once we remove the old format manager.
+ """
+
+ extension_name = None
+
+ if Path(plugin).suffix.lower() in known_extensions:
+ extension_name = Path(plugin).suffix.lower()
+ elif plugin in known_plugins:
+ pass
+ elif plugin.lower() in known_extensions:
+ extension_name = plugin.lower()
+ elif "." + plugin.lower() in known_extensions:
+ extension_name = "." + plugin.lower()
+ else:
+ raise IndexError(f"No format known by name `{plugin}`.")
+
+ if extension_name is not None:
+ for plugin_name in [
+ x
+ for file_extension in known_extensions[extension_name]
+ for x in file_extension.priority
+ ]:
+ if known_plugins[plugin_name].is_legacy:
+ plugin = plugin_name
+ break
+
+ return known_plugins[plugin]
+
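+# Example (sketch): ``_get_config("TIFF")`` returns the config registered
+# under that plugin name, while a filename or extension argument resolves
+# through the extension's priority list to its first legacy plugin.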
+
+class Format(object):
+ """Represents an implementation to read/write a particular file format
+
+ A format instance is responsible for 1) providing information about
+ a format; 2) determining whether a certain file can be read/written
+ with this format; 3) providing a reader/writer class.
+
+ Generally, imageio will select the right format and use that to
+ read/write an image. A format can also be explicitly chosen in all
+ read/write functions. Use ``print(format)``, or ``help(format_name)``
+ to see its documentation.
+
+ To implement a specific format, one should create a subclass of
+ Format and the Format.Reader and Format.Writer classes. See
+ :class:`imageio.plugins` for details.
+
+ Parameters
+ ----------
+ name : str
+ A short name of this format. Users can select a format using its name.
+ description : str
+ A one-line description of the format.
+ extensions : str | list | None
+ List of filename extensions that this format supports. If a
+ string is passed it should be space or comma separated. The
+ extensions are used in the documentation and to allow users to
+ select a format by file extension. It is not used to determine
+ what format to use for reading/saving a file.
+ modes : str
+ A string containing the modes that this format can handle ('iIvV'),
+        "i" for an image, "I" for multiple images, "v" for a volume,
+        "V" for multiple volumes.
+ This attribute is used in the documentation and to select the
+ formats when reading/saving a file.
+ """
+
+ def __init__(self, name, description, extensions=None, modes=None):
+ """Initialize the Plugin.
+
+ Parameters
+ ----------
+ name : str
+ A short name of this format. Users can select a format using its name.
+ description : str
+ A one-line description of the format.
+ extensions : str | list | None
+ List of filename extensions that this format supports. If a
+ string is passed it should be space or comma separated. The
+ extensions are used in the documentation and to allow users to
+ select a format by file extension. It is not used to determine
+ what format to use for reading/saving a file.
+ modes : str
+ A string containing the modes that this format can handle ('iIvV'),
+            "i" for an image, "I" for multiple images, "v" for a volume,
+            "V" for multiple volumes.
+ This attribute is used in the documentation and to select the
+ formats when reading/saving a file.
+ """
+
+ # Store name and description
+ self._name = name.upper()
+ self._description = description
+
+ # Store extensions, do some effort to normalize them.
+ # They are stored as a list of lowercase strings without leading dots.
+ if extensions is None:
+ extensions = []
+ elif isinstance(extensions, str):
+ extensions = extensions.replace(",", " ").split(" ")
+ #
+ if isinstance(extensions, (tuple, list)):
+ self._extensions = tuple(
+ ["." + e.strip(".").lower() for e in extensions if e]
+ )
+ else:
+ raise ValueError("Invalid value for extensions given.")
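+        # e.g. extensions="PNG, Jpg" normalizes to (".png", ".jpg")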
+
+ # Store mode
+ self._modes = modes or ""
+ if not isinstance(self._modes, str):
+ raise ValueError("Invalid value for modes given.")
+ for m in self._modes:
+ if m not in "iIvV?":
+ raise ValueError("Invalid value for mode given.")
+
+    def __repr__(self):
+        # Short description
+        return "<Format %s - %s>" % (self.name, self.description)
+
+ def __str__(self):
+ return self.doc
+
+ @property
+ def doc(self):
+ """The documentation for this format (name + description + docstring)."""
+        # Our docstring is assumed to be indented by four spaces. The
+        # first line needs special attention.
+ return "%s - %s\n\n %s\n" % (
+ self.name,
+ self.description,
+ self.__doc__.strip(),
+ )
+
+ @property
+ def name(self):
+ """The name of this format."""
+ return self._name
+
+ @property
+ def description(self):
+ """A short description of this format."""
+ return self._description
+
+ @property
+ def extensions(self):
+ """A list of file extensions supported by this plugin.
+ These are all lowercase with a leading dot.
+ """
+ return self._extensions
+
+ @property
+ def modes(self):
+ """A string specifying the modes that this format can handle."""
+ return self._modes
+
+ def get_reader(self, request):
+ """get_reader(request)
+
+ Return a reader object that can be used to read data and info
+ from the given file. Users are encouraged to use
+ imageio.get_reader() instead.
+ """
+ select_mode = request.mode[1] if request.mode[1] in "iIvV" else ""
+ if select_mode not in self.modes:
+ raise RuntimeError(
+ f"Format {self.name} cannot read in {request.mode.image_mode} mode"
+ )
+ return self.Reader(self, request)
+
+ def get_writer(self, request):
+ """get_writer(request)
+
+ Return a writer object that can be used to write data and info
+ to the given file. Users are encouraged to use
+ imageio.get_writer() instead.
+ """
+ select_mode = request.mode[1] if request.mode[1] in "iIvV" else ""
+ if select_mode not in self.modes:
+ raise RuntimeError(
+ f"Format {self.name} cannot write in {request.mode.image_mode} mode"
+ )
+ return self.Writer(self, request)
+
+ def can_read(self, request):
+ """can_read(request)
+
+ Get whether this format can read data from the specified uri.
+ """
+ return self._can_read(request)
+
+ def can_write(self, request):
+ """can_write(request)
+
+        Get whether this format can write data to the specified uri.
+ """
+ return self._can_write(request)
+
+ def _can_read(self, request): # pragma: no cover
+ """Check if Plugin can read from ImageResource.
+
+ This method is called when the format manager is searching for a format
+ to read a certain image. Return True if this format can do it.
+
+ The format manager is aware of the extensions and the modes that each
+ format can handle. It will first ask all formats that *seem* to be able
+ to read it whether they can. If none can, it will ask the remaining
+ formats if they can: the extension might be missing, and this allows
+ formats to provide functionality for certain extensions, while giving
+ preference to other plugins.
+
+ If a format says it can, it should live up to it. The format would
+ ideally check the request.firstbytes and look for a header of some kind.
+
+ Parameters
+ ----------
+ request : Request
+ A request that can be used to access the ImageResource and obtain
+ metadata about it.
+
+ Returns
+ -------
+ can_read : bool
+ True if the plugin can read from the ImageResource, False otherwise.
+
+ """
+ return None # Plugins must implement this
+
+ def _can_write(self, request): # pragma: no cover
+ """Check if Plugin can write to ImageResource.
+
+ Parameters
+ ----------
+ request : Request
+ A request that can be used to access the ImageResource and obtain
+ metadata about it.
+
+ Returns
+ -------
+ can_read : bool
+ True if the plugin can write to the ImageResource, False otherwise.
+
+ """
+ return None # Plugins must implement this
+
+ # -----
+
+ class _BaseReaderWriter(object):
+ """Base class for the Reader and Writer class to implement common
+ functionality. It implements a similar approach for opening/closing
+ and context management as Python's file objects.
+ """
+
+ def __init__(self, format, request):
+ self.__closed = False
+ self._BaseReaderWriter_last_index = -1
+ self._format = format
+ self._request = request
+ # Open the reader/writer
+ self._open(**self.request.kwargs.copy())
+
+ @property
+ def format(self):
+ """The :class:`.Format` object corresponding to the current
+ read/write operation.
+ """
+ return self._format
+
+ @property
+ def request(self):
+ """The :class:`.Request` object corresponding to the
+ current read/write operation.
+ """
+ return self._request
+
+ def __enter__(self):
+ self._checkClosed()
+ return self
+
+ def __exit__(self, type, value, traceback):
+ if value is None:
+                # Otherwise an error in close() would hide the real error.
+ self.close()
+
+ def __del__(self):
+ try:
+ self.close()
+ except Exception: # pragma: no cover
+ pass # Suppress noise when called during interpreter shutdown
+
+ def close(self):
+ """Flush and close the reader/writer.
+ This method has no effect if it is already closed.
+ """
+ if self.__closed:
+ return
+ self.__closed = True
+ self._close()
+ # Process results and clean request object
+ self.request.finish()
+
+ @property
+ def closed(self):
+ """Whether the reader/writer is closed."""
+ return self.__closed
+
+        def _checkClosed(self, msg=None):
+            """Internal: raise a RuntimeError if the reader/writer is closed"""
+ if self.closed:
+ what = self.__class__.__name__
+ msg = msg or ("I/O operation on closed %s." % what)
+ raise RuntimeError(msg)
+
+ # To implement
+
+ def _open(self, **kwargs):
+ """_open(**kwargs)
+
+ Plugins should probably implement this.
+
+ It is called when reader/writer is created. Here the
+ plugin can do its initialization. The given keyword arguments
+ are those that were given by the user at imageio.read() or
+ imageio.write().
+ """
+ raise NotImplementedError()
+
+ def _close(self):
+ """_close()
+
+ Plugins should probably implement this.
+
+ It is called when the reader/writer is closed. Here the plugin
+ can do a cleanup, flush, etc.
+
+ """
+ raise NotImplementedError()
+
+ # -----
+
+ class Reader(_BaseReaderWriter):
+ """
+ The purpose of a reader object is to read data from an image
+ resource, and should be obtained by calling :func:`.get_reader`.
+
+ A reader can be used as an iterator to read multiple images,
+ and (if the format permits) only reads data from the file when
+ new data is requested (i.e. streaming). A reader can also be
+ used as a context manager so that it is automatically closed.
+
+ Plugins implement Reader's for different formats. Though rare,
+ plugins may provide additional functionality (beyond what is
+ provided by the base reader class).
+ """
+
+ def get_length(self):
+ """get_length()
+
+ Get the number of images in the file. (Note: you can also
+ use ``len(reader_object)``.)
+
+ The result can be:
+ * 0 for files that only have meta data
+ * 1 for singleton images (e.g. in PNG, JPEG, etc.)
+ * N for image series
+ * inf for streams (series of unknown length)
+ """
+ return self._get_length()
+
+ def get_data(self, index, **kwargs):
+ """get_data(index, **kwargs)
+
+ Read image data from the file, using the image index. The
+ returned image has a 'meta' attribute with the meta data.
+ Raises IndexError if the index is out of range.
+
+ Some formats may support additional keyword arguments. These are
+ listed in the documentation of those formats.
+ """
+ self._checkClosed()
+ self._BaseReaderWriter_last_index = index
+ try:
+ im, meta = self._get_data(index, **kwargs)
+ except StopIteration:
+ raise IndexError(index)
+ return Array(im, meta) # Array tests im and meta
+
+ def get_next_data(self, **kwargs):
+ """get_next_data(**kwargs)
+
+ Read the next image from the series.
+
+ Some formats may support additional keyword arguments. These are
+ listed in the documentation of those formats.
+ """
+ return self.get_data(self._BaseReaderWriter_last_index + 1, **kwargs)
+
+ def set_image_index(self, index, **kwargs):
+ """set_image_index(index)
+
+ Set the internal pointer such that the next call to
+ get_next_data() returns the image specified by the index
+ """
+ self._checkClosed()
+ n = self.get_length()
+ self._BaseReaderWriter_last_index = min(max(index - 1, -1), n)
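+        # Worked example: with get_length() == 3, set_image_index(5)
+        # clamps last_index to 3, so the next get_next_data() call asks
+        # for index 4 and raises IndexError instead of reading past the
+        # end of the series.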
+
+ def get_meta_data(self, index=None):
+ """get_meta_data(index=None)
+
+        Read meta data from the file, using the image index. If the
+ index is omitted or None, return the file's (global) meta data.
+
+ Note that ``get_data`` also provides the meta data for the returned
+ image as an attribute of that image.
+
+        The meta data is a dict whose shape depends on the format.
+ E.g. for JPEG, the dict maps group names to subdicts and each
+ group is a dict with name-value pairs. The groups represent
+ the different metadata formats (EXIF, XMP, etc.).
+ """
+ self._checkClosed()
+ meta = self._get_meta_data(index)
+ if not isinstance(meta, dict):
+ raise ValueError(
+ "Meta data must be a dict, not %r" % meta.__class__.__name__
+ )
+ return meta
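+        # Illustrative (hypothetical) shape of the result for a JPEG:
+        #     {"EXIF_MAIN": {"Orientation": 1}, "XMP": {...}}
+        # i.e. one subdict of name-value pairs per metadata group.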
+
+ def iter_data(self):
+ """iter_data()
+
+ Iterate over all images in the series. (Note: you can also
+ iterate over the reader object.)
+
+ """
+ self._checkClosed()
+ n = self.get_length()
+ i = 0
+ while i < n:
+ try:
+ im, meta = self._get_data(i)
+ except StopIteration:
+ return
+ except IndexError:
+ if n == float("inf"):
+ return
+ raise
+ yield Array(im, meta)
+ i += 1
+
+ # Compatibility
+
+ def __iter__(self):
+ return self.iter_data()
+
+ def __len__(self):
+ n = self.get_length()
+ if n == float("inf"):
+ n = sys.maxsize
+ return n
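+        # Streams report an infinite length, but __len__ must return an
+        # int, so sys.maxsize acts as a stand-in for "unknown length".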
+
+ # To implement
+
+ def _get_length(self):
+ """_get_length()
+
+ Plugins must implement this.
+
+ The returned scalar specifies the number of images in the series.
+ See Reader.get_length for more information.
+ """
+ raise NotImplementedError()
+
+ def _get_data(self, index):
+ """_get_data()
+
+ Plugins must implement this, but may raise an IndexError in
+ case the plugin does not support random access.
+
+ It should return the image and meta data: (ndarray, dict).
+ """
+ raise NotImplementedError()
+
+ def _get_meta_data(self, index):
+ """_get_meta_data(index)
+
+ Plugins must implement this.
+
+ It should return the meta data as a dict, corresponding to the
+ given index, or to the file's (global) meta data if index is
+ None.
+ """
+ raise NotImplementedError()
+
+ # -----
+
+ class Writer(_BaseReaderWriter):
+ """
+    The purpose of a writer object is to write data to an image
+    resource; it should be obtained by calling :func:`.get_writer`.
+
+ A writer will (if the format permits) write data to the file
+ as soon as new data is provided (i.e. streaming). A writer can
+ also be used as a context manager so that it is automatically
+ closed.
+
+    Plugins implement Writers for different formats. Though rare,
+ plugins may provide additional functionality (beyond what is
+ provided by the base writer class).
+ """
+
+ def append_data(self, im, meta=None):
+ """append_data(im, meta={})
+
+ Append an image (and meta data) to the file. The final meta
+ data that is used consists of the meta data on the given
+ image (if applicable), updated with the given meta data.
+ """
+ self._checkClosed()
+ # Check image data
+ if not isinstance(im, np.ndarray):
+ raise ValueError("append_data requires ndarray as first arg")
+ # Get total meta dict
+ total_meta = {}
+ if hasattr(im, "meta") and isinstance(im.meta, dict):
+ total_meta.update(im.meta)
+ if meta is None:
+ pass
+ elif not isinstance(meta, dict):
+ raise ValueError("Meta must be a dict.")
+ else:
+ total_meta.update(meta)
+
+ # Decouple meta info
+ im = asarray(im)
+ # Call
+ return self._append_data(im, total_meta)
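+        # Worked example of the merge above: if im.meta == {"fps": 30}
+        # and the caller passes meta={"fps": 60, "loop": 0}, the plugin
+        # receives total_meta == {"fps": 60, "loop": 0}; explicitly
+        # passed meta wins over metadata attached to the image.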
+
+ def set_meta_data(self, meta):
+ """set_meta_data(meta)
+
+        Sets the file's (global) meta data. The meta data is a dict whose
+        shape depends on the format. E.g. for JPEG the dict maps
+        group names to subdicts, and each group is a dict with
+        name-value pairs. The groups represent the different
+        metadata formats (EXIF, XMP, etc.).
+
+ Note that some meta formats may not be supported for
+ writing, and individual fields may be ignored without
+ warning if they are invalid.
+ """
+ self._checkClosed()
+ if not isinstance(meta, dict):
+ raise ValueError("Meta must be a dict.")
+ else:
+ return self._set_meta_data(meta)
+
+ # To implement
+
+ def _append_data(self, im, meta):
+ # Plugins must implement this
+ raise NotImplementedError()
+
+ def _set_meta_data(self, meta):
+ # Plugins must implement this
+ raise NotImplementedError()
+
+
+class FormatManager(object):
+ """
+ The FormatManager is a singleton plugin factory.
+
+ The format manager supports getting a format object using indexing (by
+ format name or extension). When used as an iterator, this object
+ yields all registered format objects.
+
+ See also :func:`.help`.
+ """
+
+ @property
+ def _formats(self):
+ available_formats = list()
+
+ for config in known_plugins.values():
+ with contextlib.suppress(ImportError):
+                # if an exception is raised, the format is not installed
+ if config.is_legacy and config.format is not None:
+ available_formats.append(config)
+
+ return available_formats
+
+ def __repr__(self):
+ return f""
+
+ def __iter__(self):
+ return iter(x.format for x in self._formats)
+
+ def __len__(self):
+ return len(self._formats)
+
+ def __str__(self):
+ ss = []
+ for config in self._formats:
+ ext = config.legacy_args["extensions"]
+ desc = config.legacy_args["description"]
+ s = f"{config.name} - {desc} [{ext}]"
+ ss.append(s)
+ return "\n".join(ss)
+
+ def __getitem__(self, name):
+ warnings.warn(
+ "The usage of `FormatManager` is deprecated and it will be "
+ "removed in Imageio v3. Use `iio.imopen` instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ if not isinstance(name, str):
+ raise ValueError(
+ "Looking up a format should be done by name or by extension."
+ )
+
+ if name == "":
+ raise ValueError("No format matches the empty string.")
+
+ # Test if name is existing file
+ if Path(name).is_file():
+ # legacy compatibility - why test reading here??
+ try:
+ return imopen(name, "r", legacy_mode=True)._format
+ except ValueError:
+ # no plugin can read the file
+ pass
+
+ config = _get_config(name.upper())
+
+ try:
+ return config.format
+ except ImportError:
+ raise ImportError(
+ f"The `{config.name}` format is not installed. "
+ f"Use `pip install imageio[{config.install_name}]` to install it."
+ )
+
+ def sort(self, *names):
+ """sort(name1, name2, name3, ...)
+
+ Sort the formats based on zero or more given names; a format with
+ a name that matches one of the given names will take precedence
+        over other formats. A match means an equal name, or a name that
+        ends with the given name (an exact match counts higher). Matching
+        is case insensitive.
+
+ Format preference will match the order of the given names: using
+ ``sort('TIFF', '-FI', '-PIL')`` would prefer the FreeImage formats
+ over the Pillow formats, but prefer TIFF even more. Each time
+ this is called, the starting point is the default format order,
+ and calling ``sort()`` with no arguments will reset the order.
+
+        Be aware that using this function can affect the behavior of
+ other code that makes use of imageio.
+
+ Also see the ``IMAGEIO_FORMAT_ORDER`` environment variable.
+ """
+
+ warnings.warn(
+ "`FormatManager` is deprecated and it will be removed in ImageIO v3."
+ " Migrating `FormatManager.sort` depends on your use-case:\n"
+ "\t- modify `iio.config.known_plugins` to specify the search order for "
+ "unrecognized formats.\n"
+ "\t- modify `iio.config.known_extensions[].priority`"
+ " to control a specific extension.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ # Check and sanitize input
+ for name in names:
+ if not isinstance(name, str):
+ raise TypeError("formats.sort() accepts only string names.")
+ if any(c in name for c in ".,"):
+ raise ValueError(
+ "Names given to formats.sort() should not "
+ "contain dots `.` or commas `,`."
+ )
+
+ should_reset = len(names) == 0
+ if should_reset:
+ names = _original_order
+
+ sane_names = [name.strip().upper() for name in names if name != ""]
+
+ # enforce order for every extension that uses it
+ flat_extensions = [
+ ext for ext_list in known_extensions.values() for ext in ext_list
+ ]
+ for extension in flat_extensions:
+ if should_reset:
+ extension.reset()
+ continue
+
+ for name in reversed(sane_names):
+                for plugin in list(extension.default_priority):
+ if plugin.endswith(name):
+ extension.priority.remove(plugin)
+ extension.priority.insert(0, plugin)
+
+ old_order = known_plugins.copy()
+ known_plugins.clear()
+
+ for name in sane_names:
+ plugin = old_order.pop(name, None)
+ if plugin is not None:
+ known_plugins[name] = plugin
+
+ known_plugins.update(old_order)
+
+ def add_format(self, iio_format, overwrite=False):
+ """add_format(format, overwrite=False)
+
+ Register a format, so that imageio can use it. If a format with the
+ same name already exists, an error is raised, unless overwrite is True,
+ in which case the current format is replaced.
+ """
+
+ warnings.warn(
+ "`FormatManager` is deprecated and it will be removed in ImageIO v3."
+ "To migrate `FormatManager.add_format` add the plugin directly to "
+ "`iio.config.known_plugins`.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ if not isinstance(iio_format, Format):
+ raise ValueError("add_format needs argument to be a Format object")
+ elif not overwrite and iio_format.name in self.get_format_names():
+ raise ValueError(
+ f"A Format named {iio_format.name} is already registered, use"
+ " `overwrite=True` to replace."
+ )
+
+ config = PluginConfig(
+ name=iio_format.name.upper(),
+ class_name=iio_format.__class__.__name__,
+ module_name=iio_format.__class__.__module__,
+ is_legacy=True,
+ install_name="unknown",
+ legacy_args={
+ "name": iio_format.name,
+ "description": iio_format.description,
+ "extensions": " ".join(iio_format.extensions),
+ "modes": iio_format.modes,
+ },
+ )
+
+ known_plugins[config.name] = config
+
+ for extension in iio_format.extensions:
+ # be conservative and always treat it as a unique file format
+ ext = FileExtension(
+ extension=extension,
+ priority=[config.name],
+ name="Unique Format",
+ description="A format inserted at runtime."
+ f" It is being read by the `{config.name}` plugin.",
+ )
+ known_extensions.setdefault(extension, list()).append(ext)
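+        # Usage sketch (DummyFormat is a hypothetical Format subclass):
+        #     formats.add_format(DummyFormat("dummy", "demo format", ".dmy", "i"))
+        # after which imageio resolves ".dmy" files to this plugin.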
+
+ def search_read_format(self, request):
+ """search_read_format(request)
+
+ Search a format that can read a file according to the given request.
+ Returns None if no appropriate format was found. (used internally)
+ """
+
+ try:
+ # in legacy_mode imopen returns a LegacyPlugin
+ return imopen(request, request.mode.io_mode, legacy_mode=True)._format
+ except AttributeError:
+ warnings.warn(
+ "ImageIO now uses a v3 plugin when reading this format."
+ " Please migrate to the v3 API (preferred) or use imageio.v2.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return None
+ except ValueError:
+ # no plugin can read this request
+ # but the legacy API doesn't raise
+ return None
+
+ def search_write_format(self, request):
+ """search_write_format(request)
+
+ Search a format that can write a file according to the given request.
+ Returns None if no appropriate format was found. (used internally)
+ """
+
+ try:
+ # in legacy_mode imopen returns a LegacyPlugin
+ return imopen(request, request.mode.io_mode, legacy_mode=True)._format
+ except AttributeError:
+ warnings.warn(
+ "ImageIO now uses a v3 plugin when writing this format."
+ " Please migrate to the v3 API (preferred) or use imageio.v2.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return None
+ except ValueError:
+ # no plugin can write this request
+ # but the legacy API doesn't raise
+ return None
+
+ def get_format_names(self):
+ """Get the names of all registered formats."""
+
+ warnings.warn(
+ "`FormatManager` is deprecated and it will be removed in ImageIO v3."
+ "To migrate `FormatManager.get_format_names` use `iio.config.known_plugins.keys()` instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ return [f.name for f in self._formats]
+
+ def show(self):
+ """Show a nicely formatted list of available formats"""
+ print(self)
diff --git a/Lib/site-packages/imageio/core/format.pyi b/Lib/site-packages/imageio/core/format.pyi
new file mode 100644
index 0000000..c1c10b1
--- /dev/null
+++ b/Lib/site-packages/imageio/core/format.pyi
@@ -0,0 +1,87 @@
+from typing import Any, Dict, Iterator, List, Optional, Union
+
+import numpy as np
+
+from ..typing import ArrayLike
+from . import Array
+from .request import Request
+from ..config import PluginConfig
+
+def _get_config(plugin: str) -> PluginConfig: ...
+
+class Format(object):
+ @property
+ def doc(self) -> str: ...
+ @property
+ def name(self) -> str: ...
+ @property
+ def description(self) -> str: ...
+ @property
+ def extensions(self) -> List[str]: ...
+ @property
+ def modes(self) -> str: ...
+ def __init__(
+ self,
+ name: str,
+ description: str,
+ extensions: Union[str, list, tuple, None] = None,
+ modes: str = None,
+ ) -> None: ...
+ def __repr__(self) -> str: ...
+ def __str__(self) -> str: ...
+ def get_reader(self, request: Request) -> Reader: ...
+ def get_writer(self, request: Request) -> Writer: ...
+ def can_read(self, request: Request) -> bool: ...
+ def can_write(self, request: Request) -> bool: ...
+ def _can_read(self, request: Request) -> bool: ...
+ def _can_write(self, request: Request) -> bool: ...
+
+ class _BaseReaderWriter(object):
+ @property
+ def format(self) -> Format: ...
+ @property
+ def request(self) -> Request: ...
+ @property
+ def closed(self) -> bool: ...
+ def __init__(self, format: Format, request: Request) -> None: ...
+ def __enter__(self) -> Format._BaseReaderWriter: ...
+ def __exit__(self, type, value, traceback) -> None: ...
+ def __del__(self) -> None: ...
+ def close(self) -> None: ...
+ def _checkClosed(self, msg=None) -> None: ...
+ def _open(self, **kwargs) -> None: ...
+ def _close(self) -> None: ...
+
+ class Reader(_BaseReaderWriter):
+ def get_length(self) -> int: ...
+ def get_data(self, index: int, **kwargs) -> Array: ...
+        def get_next_data(self, **kwargs) -> Array: ...
+ def set_image_index(self, index: int, **kwargs) -> None: ...
+ def get_meta_data(self, index: int = None) -> Dict[str, Any]: ...
+        def iter_data(self) -> Iterator[Array]: ...
+        def __iter__(self) -> Iterator[Array]: ...
+ def __len__(self) -> int: ...
+ def _get_length(self) -> int: ...
+ def _get_data(self, index: int) -> Array: ...
+ def _get_meta_data(self, index: int) -> Dict[str, Any]: ...
+
+ class Writer(_BaseReaderWriter):
+ def append_data(self, im: ArrayLike, meta: Dict[str, Any] = None) -> None: ...
+ def set_meta_data(self, meta: Dict[str, Any]) -> None: ...
+ def _append_data(self, im: ArrayLike, meta: Dict[str, Any]) -> None: ...
+ def _set_meta_data(self, meta: Dict[str, Any]) -> None: ...
+
+class FormatManager(object):
+ @property
+ def _formats(self) -> List[Format]: ...
+ def __repr__(self) -> str: ...
+    def __iter__(self) -> Iterator[Format]: ...
+ def __len__(self) -> int: ...
+ def __str__(self) -> str: ...
+ def __getitem__(self, name: str) -> Format: ...
+ def sort(self, *names: str) -> None: ...
+ def add_format(self, iio_format: Format, overwrite: bool = False) -> None: ...
+ def search_read_format(self, request: Request) -> Optional[Format]: ...
+ def search_write_format(self, request: Request) -> Optional[Format]: ...
+ def get_format_names(self) -> List[str]: ...
+ def show(self) -> None: ...
diff --git a/Lib/site-packages/imageio/core/imopen.py b/Lib/site-packages/imageio/core/imopen.py
new file mode 100644
index 0000000..a84b2a9
--- /dev/null
+++ b/Lib/site-packages/imageio/core/imopen.py
@@ -0,0 +1,281 @@
+from pathlib import Path
+import warnings
+
+from ..config import known_plugins
+from ..config.extensions import known_extensions
+from .request import (
+ SPECIAL_READ_URIS,
+ URI_FILENAME,
+ InitializationError,
+ IOMode,
+ Request,
+)
+
+
+def imopen(
+ uri,
+ io_mode,
+ *,
+ plugin=None,
+ extension=None,
+ format_hint=None,
+ legacy_mode=False,
+ **kwargs,
+):
+ """Open an ImageResource.
+
+ .. warning::
+ This warning is for pypy users. If you are not using a context manager,
+ remember to deconstruct the returned plugin to avoid leaking the file
+ handle to an unclosed file.
+
+ Parameters
+ ----------
+ uri : str or pathlib.Path or bytes or file or Request
+ The :doc:`ImageResource <../../user_guide/requests>` to load the
+ image from.
+ io_mode : str
+ The mode in which the file is opened. Possible values are::
+
+ ``r`` - open the file for reading
+ ``w`` - open the file for writing
+
+        Deprecated since v2.9:
+ A second character can be added to give the reader a hint on what
+ the user expects. This will be ignored by new plugins and will
+ only have an effect on legacy plugins. Possible values are::
+
+ ``i`` for a single image,
+ ``I`` for multiple images,
+ ``v`` for a single volume,
+ ``V`` for multiple volumes,
+ ``?`` for don't care
+
+ plugin : str, Plugin, or None
+ The plugin to use. If set to None imopen will perform a
+ search for a matching plugin. If not None, this takes priority over
+ the provided format hint.
+ extension : str
+ If not None, treat the provided ImageResource as if it had the given
+ extension. This affects the order in which backends are considered, and
+ when writing this may also influence the format used when encoding.
+ format_hint : str
+ Deprecated. Use `extension` instead.
+ legacy_mode : bool
+        If True, use the v2 behavior when searching for a suitable
+ plugin. This will ignore v3 plugins and will check ``plugin``
+ against known extensions if no plugin with the given name can be found.
+ **kwargs : Any
+ Additional keyword arguments will be passed to the plugin upon
+ construction.
+
+ Notes
+ -----
+ Registered plugins are controlled via the ``known_plugins`` dict in
+ ``imageio.config``.
+
+ Passing a ``Request`` as the uri is only supported if ``legacy_mode``
+ is ``True``. In this case ``io_mode`` is ignored.
+
+ Using the kwarg ``format_hint`` does not enforce the given format. It merely
+ provides a `hint` to the selection process and plugin. The selection
+    process uses this hint for optimization; however, a plugin's decision on how
+    to read an ImageResource will typically still be based on the content of
+ the resource.
+
+
+ Examples
+ --------
+
+ >>> import imageio.v3 as iio
+ >>> with iio.imopen("/path/to/image.png", "r") as file:
+    ...     im = file.read()
+
+ >>> with iio.imopen("/path/to/output.jpg", "w") as file:
+    ...     file.write(im)
+
+ """
+
+ if isinstance(uri, Request) and legacy_mode:
+ warnings.warn(
+ "`iio.core.Request` is a low-level object and using it"
+ " directly as input to `imopen` is discouraged. This will raise"
+ " an exception in ImageIO v3.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ request = uri
+ uri = request.raw_uri
+ io_mode = request.mode.io_mode
+ request.format_hint = format_hint
+ else:
+ request = Request(uri, io_mode, format_hint=format_hint, extension=extension)
+
+ source = "" if isinstance(uri, bytes) else uri
+
+ # fast-path based on plugin
+ # (except in legacy mode)
+ if plugin is not None:
+ if isinstance(plugin, str):
+ try:
+ config = known_plugins[plugin]
+ except KeyError:
+ request.finish()
+ raise ValueError(
+ f"`{plugin}` is not a registered plugin name."
+ ) from None
+
+ def loader(request, **kwargs):
+ return config.plugin_class(request, **kwargs)
+
+ else:
+
+ def loader(request, **kwargs):
+ return plugin(request, **kwargs)
+
+ try:
+ return loader(request, **kwargs)
+ except InitializationError as class_specific:
+ err_from = class_specific
+ err_type = RuntimeError if legacy_mode else IOError
+ err_msg = f"`{plugin}` can not handle the given uri."
+ except ImportError:
+ err_from = None
+ err_type = ImportError
+ err_msg = (
+ f"The `{config.name}` plugin is not installed. "
+ f"Use `pip install imageio[{config.install_name}]` to install it."
+ )
+ except Exception as generic_error:
+ err_from = generic_error
+ err_type = IOError
+ err_msg = f"An unknown error occurred while initializing plugin `{plugin}`."
+
+ request.finish()
+ raise err_type(err_msg) from err_from
+
+ # fast-path based on format_hint
+ if request.format_hint is not None:
+ for candidate_format in known_extensions[format_hint]:
+ for plugin_name in candidate_format.priority:
+ config = known_plugins[plugin_name]
+
+ try:
+ candidate_plugin = config.plugin_class
+ except ImportError:
+ # not installed
+ continue
+
+ try:
+ plugin_instance = candidate_plugin(request, **kwargs)
+ except InitializationError:
+ # file extension doesn't match file type
+ continue
+
+ return plugin_instance
+ else:
+ resource = (
+ "" if isinstance(request.raw_uri, bytes) else request.raw_uri
+ )
+ warnings.warn(f"`{resource}` can not be opened as a `{format_hint}` file.")
+
+ # fast-path based on file extension
+ if request.extension in known_extensions:
+ for candidate_format in known_extensions[request.extension]:
+ for plugin_name in candidate_format.priority:
+ config = known_plugins[plugin_name]
+
+ try:
+ candidate_plugin = config.plugin_class
+ except ImportError:
+ # not installed
+ continue
+
+ try:
+ plugin_instance = candidate_plugin(request, **kwargs)
+ except InitializationError:
+ # file extension doesn't match file type
+ continue
+
+ return plugin_instance
+
+ # error out for read-only special targets
+ # this is hacky; can we come up with a better solution for this?
+ if request.mode.io_mode == IOMode.write:
+ if isinstance(uri, str) and uri.startswith(SPECIAL_READ_URIS):
+ request.finish()
+ err_type = ValueError if legacy_mode else IOError
+ err_msg = f"`{source}` is read-only."
+ raise err_type(err_msg)
+
+ # error out for directories
+ # this is a bit hacky and should be cleaned once we decide
+ # how to gracefully handle DICOM
+ if request._uri_type == URI_FILENAME and Path(request.raw_uri).is_dir():
+ request.finish()
+ err_type = ValueError if legacy_mode else IOError
+ err_msg = (
+ "ImageIO does not generally support reading folders. "
+ "Limited support may be available via specific plugins. "
+ "Specify the plugin explicitly using the `plugin` kwarg, e.g. `plugin='DICOM'`"
+ )
+ raise err_type(err_msg)
+
+ # close the current request here and use fresh/new ones while trying each
+    # plugin. This is slow (it means potentially reopening a resource several
+ # times), but should only happen rarely because this is the fallback if all
+ # else fails.
+ request.finish()
+
+ # fallback option: try all plugins
+ for config in known_plugins.values():
+ # each plugin gets its own request
+ request = Request(uri, io_mode, format_hint=format_hint)
+
+ try:
+ plugin_instance = config.plugin_class(request, **kwargs)
+ except InitializationError:
+ continue
+ except ImportError:
+ continue
+ else:
+ return plugin_instance
+
+ err_type = ValueError if legacy_mode else IOError
+ err_msg = f"Could not find a backend to open `{source}`` with iomode `{io_mode}`."
+
+ # check if a missing plugin could help
+ if request.extension in known_extensions:
+ missing_plugins = list()
+
+ formats = known_extensions[request.extension]
+ plugin_names = [
+ plugin for file_format in formats for plugin in file_format.priority
+ ]
+ for name in plugin_names:
+ config = known_plugins[name]
+
+ try:
+ config.plugin_class
+ continue
+ except ImportError:
+ missing_plugins.append(config)
+
+ if len(missing_plugins) > 0:
+ install_candidates = "\n".join(
+ [
+ (
+ f" {config.name}: "
+ f"pip install imageio[{config.install_name}]"
+ )
+ for config in missing_plugins
+ ]
+ )
+ err_msg += (
+ "\nBased on the extension, the following plugins might add capable backends:\n"
+ f"{install_candidates}"
+ )
+
+ request.finish()
+ raise err_type(err_msg)
diff --git a/Lib/site-packages/imageio/core/imopen.pyi b/Lib/site-packages/imageio/core/imopen.pyi
new file mode 100644
index 0000000..0065959
--- /dev/null
+++ b/Lib/site-packages/imageio/core/imopen.pyi
@@ -0,0 +1,87 @@
+from typing import Literal, Type, TypeVar, overload
+
+from ..plugins.opencv import OpenCVPlugin
+from ..plugins.pillow import PillowPlugin
+from ..plugins.pyav import PyAVPlugin
+from ..plugins.tifffile_v3 import TifffilePlugin
+from ..typing import ImageResource
+from .legacy_plugin_wrapper import LegacyPlugin
+from .v3_plugin_api import PluginV3
+
+CustomPlugin = TypeVar("CustomPlugin", bound=PluginV3)
+
+@overload
+def imopen(
+ uri: ImageResource,
+ io_mode: Literal["r", "w"],
+ *,
+ extension: str = None,
+ format_hint: str = None,
+) -> PluginV3: ...
+@overload
+def imopen(
+ uri: ImageResource,
+ io_mode: Literal["r", "w"],
+ *,
+ plugin: str = None,
+ format_hint: str = None,
+ extension: str = None,
+ legacy_mode: Literal[True],
+ **kwargs,
+) -> LegacyPlugin: ...
+@overload
+def imopen(
+ uri: ImageResource,
+ io_mode: Literal["r", "w"],
+ *,
+ format_hint: str = None,
+ extension: str = None,
+ legacy_mode: Literal[False] = False,
+) -> PluginV3: ...
+@overload
+def imopen(
+ uri: ImageResource,
+ io_mode: Literal["r", "w"],
+ *,
+ plugin: Literal["pillow"],
+ extension: str = None,
+ format_hint: str = None,
+) -> PillowPlugin: ...
+@overload
+def imopen(
+ uri: ImageResource,
+ io_mode: Literal["r", "w"],
+ *,
+ plugin: Literal["pyav"],
+ extension: str = None,
+ format_hint: str = None,
+ container: str = None,
+) -> PyAVPlugin: ...
+@overload
+def imopen(
+ uri,
+ io_mode: Literal["r", "w"],
+ *,
+ plugin: Literal["opencv"],
+ extension: str = None,
+ format_hint: str = None,
+) -> OpenCVPlugin: ...
+@overload
+def imopen(
+ uri,
+ io_mode: Literal["r", "w"],
+ *,
+ plugin: Literal["tifffile"],
+ extension: str = None,
+ format_hint: str = None,
+) -> TifffilePlugin: ...
+@overload
+def imopen(
+ uri: ImageResource,
+ io_mode: Literal["r", "w"],
+ *,
+ plugin: Type[CustomPlugin],
+ extension: str = None,
+ format_hint: str = None,
+ **kwargs,
+) -> CustomPlugin: ...
diff --git a/Lib/site-packages/imageio/core/legacy_plugin_wrapper.py b/Lib/site-packages/imageio/core/legacy_plugin_wrapper.py
new file mode 100644
index 0000000..71d2a0a
--- /dev/null
+++ b/Lib/site-packages/imageio/core/legacy_plugin_wrapper.py
@@ -0,0 +1,363 @@
+from pathlib import Path
+
+import numpy as np
+
+from ..config import known_extensions
+from .request import InitializationError, IOMode
+from .v3_plugin_api import ImageProperties, PluginV3
+
+
+def _legacy_default_index(format):
+ if format._name == "FFMPEG":
+ index = Ellipsis
+ elif format._name == "GIF-PIL":
+ index = Ellipsis
+ else:
+ index = 0
+
+ return index
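+    # Examples: FFMPEG (video) and GIF-PIL (animation) default to
+    # stacking all frames (index=Ellipsis); every other legacy format
+    # defaults to the first image (index=0).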
+
+
+class LegacyPlugin(PluginV3):
+ """A plugin to make old (v2.9) plugins compatible with v3.0
+
+    .. deprecated:: 2.9
+ `legacy_get_reader` will be removed in a future version of imageio.
+ `legacy_get_writer` will be removed in a future version of imageio.
+
+ This plugin is a wrapper around the old FormatManager class and exposes
+ all the old plugins via the new API. On top of this it has
+ ``legacy_get_reader`` and ``legacy_get_writer`` methods to allow using
+ it with the v2.9 API.
+
+ Methods
+ -------
+ read(index=None, **kwargs)
+ Read the image at position ``index``.
+ write(image, **kwargs)
+ Write image to the URI.
+ iter(**kwargs)
+ Iteratively yield images from the given URI.
+ get_meta(index=None)
+ Return the metadata for the image at position ``index``.
+ legacy_get_reader(**kwargs)
+        Returns the v2.9 image reader. (deprecated)
+ legacy_get_writer(**kwargs)
+        Returns the v2.9 image writer. (deprecated)
+
+ Examples
+ --------
+
+ >>> import imageio.v3 as iio
+ >>> with iio.imopen("/path/to/image.tiff", "r", legacy_mode=True) as file:
+    ...     reader = file.legacy_get_reader()  # deprecated
+    ...     for im in file.iter():
+    ...         print(im.shape)
+
+ """
+
+ def __init__(self, request, legacy_plugin):
+ """Instantiate a new Legacy Plugin
+
+ Parameters
+ ----------
+ uri : {str, pathlib.Path, bytes, file}
+ The resource to load the image from, e.g. a filename, pathlib.Path,
+ http address or file object, see the docs for more info.
+ legacy_plugin : Format
+ The (legacy) format to use to interface with the URI.
+
+ """
+ self._request = request
+ self._format = legacy_plugin
+
+ source = (
+ ""
+ if isinstance(self._request.raw_uri, bytes)
+ else self._request.raw_uri
+ )
+ if self._request.mode.io_mode == IOMode.read:
+ if not self._format.can_read(request):
+ raise InitializationError(
+ f"`{self._format.name}`" f" can not read `{source}`."
+ )
+ else:
+ if not self._format.can_write(request):
+ raise InitializationError(
+ f"`{self._format.name}`" f" can not write to `{source}`."
+ )
+
+ def legacy_get_reader(self, **kwargs):
+ """legacy_get_reader(**kwargs)
+
+        A utility method to provide support for the v2.9 API.
+
+ Parameters
+ ----------
+ kwargs : ...
+ Further keyword arguments are passed to the reader. See :func:`.help`
+ to see what arguments are available for a particular format.
+ """
+
+ # Note: this will break thread-safety
+ self._request._kwargs = kwargs
+
+ # safeguard for DICOM plugin reading from folders
+ try:
+ assert Path(self._request.filename).is_dir()
+ except OSError:
+ pass # not a valid path on this OS
+ except AssertionError:
+ pass # not a folder
+ else:
+ return self._format.get_reader(self._request)
+
+ self._request.get_file().seek(0)
+ return self._format.get_reader(self._request)
+
+ def read(self, *, index=None, **kwargs):
+ """
+        Parses the given URI and creates an ndarray from it.
+
+ Parameters
+ ----------
+ index : {integer, None}
+ If the URI contains a list of ndimages return the index-th
+ image. If None, stack all images into an ndimage along the
+ 0-th dimension (equivalent to np.stack(imgs, axis=0)).
+ kwargs : ...
+ Further keyword arguments are passed to the reader. See
+ :func:`.help` to see what arguments are available for a particular
+ format.
+
+ Returns
+ -------
+ ndimage : np.ndarray
+ A numpy array containing the decoded image data.
+
+ """
+
+ if index is None:
+ index = _legacy_default_index(self._format)
+
+ if index is Ellipsis:
+ img = np.stack([im for im in self.iter(**kwargs)])
+ return img
+
+ reader = self.legacy_get_reader(**kwargs)
+ return reader.get_data(index)
+
+ def legacy_get_writer(self, **kwargs):
+ """legacy_get_writer(**kwargs)
+
+ Returns a :class:`.Writer` object which can be used to write data
+ and meta data to the specified file.
+
+ Parameters
+ ----------
+ kwargs : ...
+ Further keyword arguments are passed to the writer. See :func:`.help`
+ to see what arguments are available for a particular format.
+ """
+
+ # Note: this will break thread-safety
+ self._request._kwargs = kwargs
+ return self._format.get_writer(self._request)
+
+ def write(self, ndimage, *, is_batch=None, metadata=None, **kwargs):
+ """
+ Write an ndimage to the URI specified in path.
+
+ If the URI points to a file on the current host and the file does not
+ yet exist it will be created. If the file exists already, it will be
+ appended if possible; otherwise, it will be replaced.
+
+ Parameters
+ ----------
+ ndimage : numpy.ndarray
+ The ndimage or list of ndimages to write.
+ is_batch : bool
+ If True, treat the supplied ndimage as a batch of images. If False,
+ treat the supplied ndimage as a single image. If None, try to
+ determine ``is_batch`` from the ndimage's shape and ndim.
+ metadata : dict
+ The metadata passed to write alongside the image.
+ kwargs : ...
+ Further keyword arguments are passed to the writer. See
+ :func:`.help` to see what arguments are available for a
+ particular format.
+
+
+ Returns
+ -------
+ buffer : bytes
+ When writing to the special target "", this function will
+ return the encoded image data as a bytes string. Otherwise it
+ returns None.
+
+ Notes
+ -----
+ Automatically determining ``is_batch`` may fail for some images due to
+ shape aliasing. For example, it may classify a channel-first color image
+ as a batch of gray images. In most cases this automatic deduction works
+ fine (it has for almost a decade), but if you do have one of those edge
+ cases (or are worried that you might) consider explicitly setting
+ ``is_batch``.
+
+ """
+
+ if is_batch or isinstance(ndimage, (list, tuple)):
+ pass # ndimage is list of images
+ elif is_batch is False:
+ ndimage = [ndimage]
+ else:
+ # Write the largest possible block by guessing the meaning of each
+ # dimension from the shape/ndim and then checking if any batch
+ # dimensions are left.
+ ndimage = np.asanyarray(ndimage)
+ batch_dims = ndimage.ndim
+
+ # two spatial dimensions
+ batch_dims = max(batch_dims - 2, 0)
+
+ # packed (channel-last) image
+ if ndimage.ndim >= 3 and ndimage.shape[-1] < 5:
+ batch_dims = max(batch_dims - 1, 0)
+
+ # format supports volumetric images
+ ext_infos = known_extensions.get(self._request.extension, list())
+ for ext_info in ext_infos:
+ if self._format.name in ext_info.priority and ext_info.volume_support:
+ batch_dims = max(batch_dims - 1, 0)
+ break
+
+ if batch_dims == 0:
+ ndimage = [ndimage]
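+            # Shape walkthrough (assuming a 2D format without volume
+            # support): (H, W) leaves 0 batch dims -> single image;
+            # (H, W, 3) absorbs the channel-last dim -> single image;
+            # (N, H, W, 3) leaves 1 batch dim -> written as N images.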
+
+ with self.legacy_get_writer(**kwargs) as writer:
+ for image in ndimage:
+ image = np.asanyarray(image)
+
+ if image.ndim < 2:
+ raise ValueError(
+ "The image must have at least two spatial dimensions."
+ )
+
+ if not np.issubdtype(image.dtype, np.number) and not np.issubdtype(
+ image.dtype, bool
+ ):
+ raise ValueError(
+ f"All images have to be numeric, and not `{image.dtype}`."
+ )
+
+ writer.append_data(image, metadata)
+
+ return writer.request.get_result()
+
+ def iter(self, **kwargs):
+ """Iterate over a list of ndimages given by the URI
+
+ Parameters
+ ----------
+ kwargs : ...
+ Further keyword arguments are passed to the reader. See
+ :func:`.help` to see what arguments are available for a particular
+ format.
+ """
+
+ reader = self.legacy_get_reader(**kwargs)
+ for image in reader:
+ yield image
+
+ def properties(self, index=None):
+ """Standardized ndimage metadata.
+
+ Parameters
+ ----------
+ index : int
+ The index of the ndimage for which to return properties. If the
+ index is out of bounds a ``ValueError`` is raised. If ``None``,
+ return the properties for the ndimage stack. If this is impossible,
+ e.g., due to shape mismatch, an exception will be raised.
+
+ Returns
+ -------
+ properties : ImageProperties
+ A dataclass filled with standardized image metadata.
+
+ """
+
+ if index is None:
+ index = _legacy_default_index(self._format)
+
+ # for backwards compatibility ... actually reads pixel data :(
+ if index is Ellipsis:
+ image = self.read(index=0)
+ n_images = self.legacy_get_reader().get_length()
+ return ImageProperties(
+ shape=(n_images, *image.shape),
+ dtype=image.dtype,
+ n_images=n_images,
+ is_batch=True,
+ )
+
+ image = self.read(index=index)
+ return ImageProperties(
+ shape=image.shape,
+ dtype=image.dtype,
+ is_batch=False,
+ )
+
+ def get_meta(self, *, index=None):
+ """Read ndimage metadata from the URI
+
+ Parameters
+ ----------
+ index : {integer, None}
+ If the URI contains a list of ndimages return the metadata
+            corresponding to the index-th image. If None, the behavior
+            depends on the API in use:
+
+            Legacy-style API: return the metadata of the first element (index=0).
+            New-style API: behavior depends on the plugin in use.
+
+ Returns
+ -------
+ metadata : dict
+ A dictionary of metadata.
+
+ """
+
+ return self.metadata(index=index, exclude_applied=False)
+
+ def metadata(self, index=None, exclude_applied: bool = True):
+ """Format-Specific ndimage metadata.
+
+ Parameters
+ ----------
+ index : int
+ The index of the ndimage to read. If the index is out of bounds a
+ ``ValueError`` is raised. If ``None``, global metadata is returned.
+ exclude_applied : bool
+ This parameter exists for compatibility and has no effect. Legacy
+ plugins always report all metadata they find.
+
+ Returns
+ -------
+ metadata : dict
+ A dictionary filled with format-specific metadata fields and their
+ values.
+
+ """
+
+ if index is None:
+ index = _legacy_default_index(self._format)
+
+ return self.legacy_get_reader().get_meta_data(index=index)
+
+ def __del__(self) -> None:
+ pass
+ # turns out we can't close the file here for LegacyPlugin
+ # because it would break backwards compatibility
+ # with legacy_get_writer and legacy_get_reader
+ # self._request.finish()
diff --git a/Lib/site-packages/imageio/core/legacy_plugin_wrapper.pyi b/Lib/site-packages/imageio/core/legacy_plugin_wrapper.pyi
new file mode 100644
index 0000000..52e3ec5
--- /dev/null
+++ b/Lib/site-packages/imageio/core/legacy_plugin_wrapper.pyi
@@ -0,0 +1,27 @@
+import numpy as np
+from typing import Optional, Dict, Any, Union, List, Iterator
+
+from .request import Request
+from .v3_plugin_api import PluginV3, ImageProperties
+from .format import Format
+from ..typing import ArrayLike
+
+class LegacyPlugin(PluginV3):
+ def __init__(self, request: Request, legacy_plugin: Format) -> None: ...
+ def legacy_get_reader(self, **kwargs) -> Format.Reader: ...
+ def read(self, *, index: Optional[int] = 0, **kwargs) -> np.ndarray: ...
+ def legacy_get_writer(self, **kwargs) -> Format.Writer: ...
+ def write(
+ self,
+ ndimage: Union[ArrayLike, List[ArrayLike]],
+ *,
+ is_batch: bool = None,
+ **kwargs
+ ) -> Optional[bytes]: ...
+ def iter(self, **kwargs) -> Iterator[np.ndarray]: ...
+ def properties(self, index: Optional[int] = 0) -> ImageProperties: ...
+ def get_meta(self, *, index: Optional[int] = 0) -> Dict[str, Any]: ...
+ def metadata(
+ self, index: Optional[int] = 0, exclude_applied: bool = True
+ ) -> Dict[str, Any]: ...
+ def __del__(self) -> None: ...
diff --git a/Lib/site-packages/imageio/core/request.py b/Lib/site-packages/imageio/core/request.py
new file mode 100644
index 0000000..f42da19
--- /dev/null
+++ b/Lib/site-packages/imageio/core/request.py
@@ -0,0 +1,751 @@
+# -*- coding: utf-8 -*-
+# imageio is distributed under the terms of the (new) BSD License.
+
+"""
+Definition of the Request object, which acts as a kind of bridge between
+what the user wants and what the plugins can provide.
+"""
+
+import os
+from io import BytesIO
+import zipfile
+import tempfile
+import shutil
+import enum
+import warnings
+
+from ..core import urlopen, get_remote_file
+
+from pathlib import Path
+from urllib.parse import urlparse
+from typing import Optional
+
+# URI types
+URI_BYTES = 1
+URI_FILE = 2
+URI_FILENAME = 3
+URI_ZIPPED = 4
+URI_HTTP = 5
+URI_FTP = 6
+
+
+class IOMode(str, enum.Enum):
+ """Available Image modes
+
+ This is a helper enum for ``Request.Mode`` which is a composite of a
+    ``Request.ImageMode`` and ``Request.IOMode``. The IOMode tells the
+    plugin whether the resource should be read from or written to. Available values are
+
+ - read ("r"): Read from the specified resource
+ - write ("w"): Write to the specified resource
+
+ """
+
+ read = "r"
+ write = "w"
+
+
+class ImageMode(str, enum.Enum):
+ """Available Image modes
+
+ This is a helper enum for ``Request.Mode`` which is a composite of a
+    ``Request.ImageMode`` and ``Request.IOMode``. The ImageMode tells the
+ plugin the desired (and expected) image shape. Available values are
+
+ - single_image ("i"): Return a single image extending in two spacial
+ dimensions
+ - multi_image ("I"): Return a list of images extending in two spacial
+ dimensions
+ - single_volume ("v"): Return an image extending into multiple dimensions.
+      E.g. three spatial dimensions for image stacks, or two spatial and one
+ time dimension for videos
+ - multi_volume ("V"): Return a list of images extending into multiple
+ dimensions.
+ - any_mode ("?"): Return an image in any format (the plugin decides the
+ appropriate action).
+
+ """
+
+ single_image = "i"
+ multi_image = "I"
+ single_volume = "v"
+ multi_volume = "V"
+ any_mode = "?"
+
+
+@enum.unique
+class Mode(str, enum.Enum):
+ """The mode to use when interacting with the resource
+
+ ``Request.Mode`` is a composite of ``Request.ImageMode`` and
+    ``Request.IOMode``. The ImageMode tells the plugin the desired (and
+    expected) image shape, and the ``Request.IOMode`` tells the plugin how
+    the resource should be interacted with. For a detailed description of the
+ available modes, see the documentation for ``Request.ImageMode`` and
+ ``Request.IOMode`` respectively.
+
+ Available modes are all combinations of ``Request.IOMode`` and ``Request.ImageMode``:
+
+ - read_single_image ("ri")
+ - read_multi_image ("rI")
+ - read_single_volume ("rv")
+ - read_multi_volume ("rV")
+ - read_any ("r?")
+ - write_single_image ("wi")
+ - write_multi_image ("wI")
+ - write_single_volume ("wv")
+ - write_multi_volume ("wV")
+ - write_any ("w?")
+
+ Examples
+ --------
+ >>> Request.Mode("rI") # a list of simple images should be read from the resource
+ >>> Request.Mode("wv") # a single volume should be written to the resource
+
+ """
+
+ read_single_image = "ri"
+ read_multi_image = "rI"
+ read_single_volume = "rv"
+ read_multi_volume = "rV"
+ read_any = "r?"
+ write_single_image = "wi"
+ write_multi_image = "wI"
+ write_single_volume = "wv"
+ write_multi_volume = "wV"
+ write_any = "w?"
+
+ @classmethod
+ def _missing_(cls, value):
+ """Enable Mode("r") and Mode("w")
+
+ The sunder method ``_missing_`` is called whenever the constructor fails
+ to directly look up the corresponding enum value from the given input.
+ In our case, we use it to convert the modes "r" and "w" (from the v3
+ API) into their legacy versions "r?" and "w?".
+
+ More info on _missing_:
+ https://docs.python.org/3/library/enum.html#supported-sunder-names
+ """
+
+ if value == "r":
+ return cls("r?")
+ elif value == "w":
+ return cls("w?")
+ else:
+ raise ValueError(f"{value} is no valid Mode.")
+
+ @property
+ def io_mode(self) -> IOMode:
+ return IOMode(self.value[0])
+
+ @property
+ def image_mode(self) -> ImageMode:
+ return ImageMode(self.value[1])
+
+ def __getitem__(self, key):
+ """For backwards compatibility with the old non-enum modes"""
+ if key == 0:
+ return self.io_mode
+ elif key == 1:
+ return self.image_mode
+ else:
+ raise IndexError(f"Mode has no item {key}")
+
+
+SPECIAL_READ_URIS = "